Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
apex
Commits
68c850d3
Commit
68c850d3
authored
Jun 18, 2019
by
Michael Carilli
Browse files
Fix for
https://github.com/NVIDIA/apex/issues/361
parent
d5e2bb4b
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
6 additions
and
2 deletions
+6
-2
apex/amp/_process_optimizer.py
apex/amp/_process_optimizer.py
+3
-1
apex/amp/handle.py
apex/amp/handle.py
+3
-1
No files found.
apex/amp/_process_optimizer.py
View file @
68c850d3
...
...
@@ -284,7 +284,9 @@ def _process_optimizer(optimizer, properties):
         _master_params_to_model_params,
         optimizer)

     old_step = optimizer.step
-    def new_step(self):
+    def new_step(self, closure=None):
+        if closure is not None:
+            raise RuntimeError("Currently, Amp does not support closure use with optimizers.")
         retval = old_step()
         self._master_params_to_model_params()
         # Clear the master grads that wouldn't be zeroed by model.zero_grad()
...
...
apex/amp/handle.py
View file @
68c850d3
...
...
@@ -136,7 +136,9 @@ def scale_loss(loss,
     # necessary because amp.scale_loss is already creating a temporary scope.
     def patch_step(opt, loss_scaler, loss_id):
         opt_step = opt.step
-        def skip_step():
+        def skip_step(closure=None):
+            if closure is not None:
+                raise RuntimeError("Currently, Amp does not support closure use with optimizers.")
             maybe_print(("Gradient overflow.  Skipping step, loss scaler " +
                          "{} reducing loss scale to {}").format(loss_id,
                                                                 loss_scaler.loss_scale()))
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment