chenpangpang / transformers · Commits

Unverified commit e2770748
Authored Jun 28, 2021 by Stas Bekman; committed by GitHub on Jun 28, 2021
pass the matching trainer log level to deepspeed (#12401)
Parent: 7e22609e

Showing 1 changed file with 8 additions and 3 deletions:

  src/transformers/deepspeed.py  (+8, -3)
src/transformers/deepspeed.py (view file @ e2770748)
@@ -295,11 +295,13 @@ def deepspeed_init(trainer, num_training_steps, resume_from_checkpoint=None):
     """
     import deepspeed
+    from deepspeed.utils import logger as ds_logger

     model = trainer.model
+    args = trainer.args

-    hf_deepspeed_config = trainer.args.hf_deepspeed_config
-    hf_deepspeed_config.trainer_config_finalize(trainer.args, model, num_training_steps)
+    hf_deepspeed_config = args.hf_deepspeed_config
+    hf_deepspeed_config.trainer_config_finalize(args, model, num_training_steps)

     # resume config update - some bits like `model` and `num_training_steps` only become available during train
     config = hf_deepspeed_config.config

@@ -319,7 +321,7 @@ def deepspeed_init(trainer, num_training_steps, resume_from_checkpoint=None):
     optimizer = None
     if "optimizer" in config:
-        if trainer.args.adafactor:
+        if args.adafactor:
             raise ValueError(
                 "--adafactor was passed, but also found `optimizer` configured in the DeepSpeed config. "
                 "Only one optimizer can be configured."

@@ -356,6 +358,9 @@ def deepspeed_init(trainer, num_training_steps, resume_from_checkpoint=None):
     # keep for quick debug:
     # from pprint import pprint; pprint(config)

+    # set the Deepspeed log level consistent with the trainer
+    ds_logger.setLevel(args.get_process_log_level())
+
     model_parameters = filter(lambda p: p.requires_grad, model.parameters())

     model, optimizer, _, lr_scheduler = deepspeed.initialize(
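The substantive change is small: deepspeed_init now aliases trainer.args as args and mirrors the trainer's per-process log level onto DeepSpeed's module-level logger. Below is a minimal sketch of that pattern in isolation, assuming deepspeed and a transformers version that provides TrainingArguments.get_process_log_level() (v4.8+) are installed; the output_dir and log_level values are illustrative.

    import logging

    from deepspeed.utils import logger as ds_logger
    from transformers import TrainingArguments

    # Illustrative arguments: log_level applies to the main process,
    # log_level_replica to the other ranks.
    args = TrainingArguments(output_dir="tmp_out", log_level="warning")

    # get_process_log_level() resolves the effective level for the
    # current process, so every rank configures DeepSpeed's logger
    # the same way the Trainer configures its own logging.
    ds_logger.setLevel(args.get_process_log_level())

    print(logging.getLevelName(ds_logger.level))  # WARNING

Without this call, DeepSpeed keeps its own logger defaults (typically INFO), so passing --log_level warning would quiet the trainer while DeepSpeed kept emitting INFO messages; deriving both levels from the same source keeps multi-process logs consistent.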