chenpangpang / transformers
"vscode:/vscode.git/clone" did not exist on "da6a1b6ca1fc6fa99a1a8c14af5be6fc2b3b02f3"
Commit 41aa0e80, authored Nov 29, 2019 by Juha Kiili

Refactor logs and fix loss bug

Parent 05d4232f
Showing 1 changed file with 8 additions and 8 deletions.

examples/run_glue.py  (+8 −8)
```diff
@@ -171,22 +171,22 @@ def train(args, train_dataset, model, tokenizer):
                 global_step += 1
 
                 if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0:
-                    # Log metrics
-                    logs = {'step': global_step}
+                    logs = {}
                     if args.local_rank == -1 and args.evaluate_during_training:  # Only evaluate when single GPU otherwise metrics may not average well
                         results = evaluate(args, model, tokenizer)
                         for key, value in results.items():
                             eval_key = 'eval_{}'.format(key)
-                            tb_writer.add_scalar(eval_key, value, global_step)
-                            logs[eval_key] = str(value)
-                    logging_loss = tr_loss
+                            logs[eval_key] = value
+
                     loss_scalar = (tr_loss - logging_loss) / args.logging_steps
                     learning_rate_scalar = scheduler.get_lr()[0]
-                    tb_writer.add_scalar('lr', learning_rate_scalar, global_step)
-                    tb_writer.add_scalar('loss', loss_scalar, global_step)
                     logs['learning_rate'] = learning_rate_scalar
                     logs['loss'] = loss_scalar
-                    print(json.dumps(logs))
+                    logging_loss = tr_loss
+
+                    for key, value in logs.items():
+                        tb_writer.add_scalar(key, value, global_step)
+                    print(json.dumps({**logs, **{'step': global_step}}))
 
                 if args.local_rank in [-1, 0] and args.save_steps > 0 and global_step % args.save_steps == 0:
                     # Save model checkpoint
```
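The "loss bug" in the commit title is visible in the removed lines: `logging_loss` was reset to `tr_loss` before `loss_scalar` was computed, so `tr_loss - logging_loss` was always zero and the logged loss was a constant 0.0. A minimal sketch of the two orderings, with made-up values (the variable names mirror `run_glue.py`):

```python
# Windowed-loss computation from run_glue.py, reduced to plain numbers.
# tr_loss is the running sum of training loss; logging_loss is its value
# at the previous logging step. The numbers below are hypothetical.
logging_steps = 50
tr_loss, logging_loss = 123.4, 100.9

# Buggy order (the removed lines): the checkpoint is advanced first,
# so the delta is always zero.
logging_loss = tr_loss
loss_scalar = (tr_loss - logging_loss) / logging_steps
print(loss_scalar)  # 0.0

# Fixed order (the added lines): take the mean over the last
# logging_steps updates, then advance the checkpoint.
tr_loss, logging_loss = 123.4, 100.9
loss_scalar = (tr_loss - logging_loss) / logging_steps
print(loss_scalar)  # 0.45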
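The refactor also routes every metric through the single `logs` dict: the TensorBoard tags and the JSON line printed to stdout now come from one place, `eval_*` values are no longer stringified, and the step number is merged in only at print time instead of living inside the dict. A runnable sketch of that pattern, assuming PyTorch's bundled `SummaryWriter` and placeholder values:

```python
# Sketch of the refactored logging pattern: collect scalars in one dict,
# then emit them both to TensorBoard and as one machine-readable JSON line.
# Assumes torch >= 1.1 for torch.utils.tensorboard; the values are made up.
import json

from torch.utils.tensorboard import SummaryWriter

tb_writer = SummaryWriter()
global_step = 100
logs = {'learning_rate': 4.9e-05, 'loss': 0.45}  # plus any eval_* metrics

for key, value in logs.items():
    tb_writer.add_scalar(key, value, global_step)
print(json.dumps({**logs, **{'step': global_step}}))
# {"learning_rate": 4.9e-05, "loss": 0.45, "step": 100}
tb_writer.close()
```

Merging `step` at print time keeps every dict value a plottable scalar for the `add_scalar` loop, which is also why the `str(value)` cast on the eval metrics was dropped.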