chenpangpang / transformers

Commit 87b9ec38
Authored Apr 29, 2019 by Mathieu Prouveur

    Fix tr_loss rescaling factor using global_step

Parent: ed8fad73
Showing 2 changed files with 5 additions and 5 deletions:

    examples/run_classifier.py   +3 -3
    examples/run_swag.py         +2 -2
examples/run_classifier.py

@@ -845,7 +845,7 @@ def main():
                 else:
                     loss.backward()
-                tr_loss += loss.item() * args.gradient_accumulation_steps
+                tr_loss += loss.item()
                 nb_tr_examples += input_ids.size(0)
                 nb_tr_steps += 1
                 if (step + 1) % args.gradient_accumulation_steps == 0:

@@ -936,7 +936,7 @@ def main():
         elif output_mode == "regression":
             preds = np.squeeze(preds)
         result = compute_metrics(task_name, preds, all_label_ids.numpy())
-        loss = tr_loss/nb_tr_steps if args.do_train else None
+        loss = tr_loss/global_step if args.do_train else None

         result['eval_loss'] = eval_loss
         result['global_step'] = global_step

@@ -1004,7 +1004,7 @@ def main():
             preds = preds[0]
             preds = np.argmax(preds, axis=1)
             result = compute_metrics(task_name, preds, all_label_ids.numpy())
-            loss = tr_loss/nb_tr_steps if args.do_train else None
+            loss = tr_loss/global_step if args.do_train else None

             result['eval_loss'] = eval_loss
             result['global_step'] = global_step
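All three changes in this file concern how the averaged training loss is reported: the per-micro-batch loss is divided by args.gradient_accumulation_steps before backward(), so the accumulated tr_loss is a sum of scaled losses and should be divided by the number of optimizer updates (global_step) rather than the number of micro-batches (nb_tr_steps). The following minimal sketch illustrates that bookkeeping; it is not code from the repository, and report_training_loss / micro_batch_losses are hypothetical names used only for illustration.

# Minimal sketch (assumed names, not the repository's actual training loop) of how
# tr_loss, nb_tr_steps and global_step interact under gradient accumulation.
def report_training_loss(micro_batch_losses, gradient_accumulation_steps):
    tr_loss, nb_tr_steps, global_step = 0.0, 0, 0
    for step, raw_loss in enumerate(micro_batch_losses):
        # The examples scale each micro-batch loss down before backward() so that
        # the accumulated gradients match one "real" batch.
        loss = raw_loss / gradient_accumulation_steps
        tr_loss += loss              # accumulate the scaled loss, as after this commit
        nb_tr_steps += 1             # counts micro-batches
        if (step + 1) % gradient_accumulation_steps == 0:
            global_step += 1         # counts optimizer updates
    # Dividing the scaled sum by the number of optimizer updates recovers the
    # average un-scaled loss per micro-batch:
    #   sum(raw/gas) / (nb_tr_steps/gas) == sum(raw) / nb_tr_steps
    return tr_loss / global_step

if __name__ == "__main__":
    print(report_training_loss([1.0, 2.0, 3.0, 4.0], gradient_accumulation_steps=2))  # 2.5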
examples/run_swag.py

@@ -452,7 +452,7 @@ def main():
                     loss = loss * args.loss_scale
                 if args.gradient_accumulation_steps > 1:
                     loss = loss / args.gradient_accumulation_steps
-                tr_loss += loss.item() * args.gradient_accumulation_steps
+                tr_loss += loss.item()
                 nb_tr_examples += input_ids.size(0)
                 nb_tr_steps += 1

@@ -537,7 +537,7 @@ def main():
         result = {'eval_loss': eval_loss,
                   'eval_accuracy': eval_accuracy,
                   'global_step': global_step,
-                  'loss': tr_loss/nb_tr_steps}
+                  'loss': tr_loss/global_step}

         output_eval_file = os.path.join(args.output_dir, "eval_results.txt")
         with open(output_eval_file, "w") as writer:
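run_swag.py gets the same fix. Note that whenever nb_tr_steps is an exact multiple of args.gradient_accumulation_steps, the old report (tr_loss/nb_tr_steps with the re-multiplied accumulation) and the new report (tr_loss/global_step with the plain accumulation) produce the same value. The check below is illustrative only: raw_losses is made-up data and gas is a shorthand standing in for args.gradient_accumulation_steps.

# Illustrative check only; raw_losses and gas are assumptions, not repository data.
gas = 4                                                   # args.gradient_accumulation_steps
raw_losses = [0.9, 1.1, 1.3, 0.7, 1.0, 1.2, 0.8, 1.0]

old_tr_loss = sum((l / gas) * gas for l in raw_losses)    # before: scaled loss re-multiplied
new_tr_loss = sum(l / gas for l in raw_losses)            # after: scaled loss kept as-is

nb_tr_steps = len(raw_losses)                             # micro-batches seen
global_step = nb_tr_steps // gas                          # optimizer updates performed

# old report (tr_loss/nb_tr_steps) equals new report (tr_loss/global_step)
assert abs(old_tr_loss / nb_tr_steps - new_tr_loss / global_step) < 1e-9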