OpenDAS / vision · Commits

Unverified commit 1de53bef, authored Nov 12, 2021 by Vasilis Vryniotis and committed by GitHub on Nov 12, 2021
Parent: f676f940

Simplify the gradient clipping code. (#4896)
Showing 2 changed files, with 2 additions and 10 deletions:

references/classification/train.py   +2 -2
references/classification/utils.py   +0 -8
references/classification/train.py

@@ -40,13 +40,13 @@ def train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, arg
             if args.clip_grad_norm is not None:
                 # we should unscale the gradients of optimizer's assigned params if do gradient clipping
                 scaler.unscale_(optimizer)
-                nn.utils.clip_grad_norm_(utils.get_optimizer_params(optimizer), args.clip_grad_norm)
+                nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm)
             scaler.step(optimizer)
             scaler.update()
         else:
             loss.backward()
             if args.clip_grad_norm is not None:
-                nn.utils.clip_grad_norm_(utils.get_optimizer_params(optimizer), args.clip_grad_norm)
+                nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm)
             optimizer.step()

         if model_ema and i % args.model_ema_steps == 0:
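For context on what the updated branch does at runtime, here is a minimal, self-contained sketch of the pattern train.py follows after this change. It is illustrative only: the toy model, data, and the clip_grad_norm value are placeholders standing in for what the reference script builds from command-line arguments, and none of it is part of the commit.

import torch
import torch.nn as nn

# Toy stand-ins for the objects the reference script constructs from args.
device = "cuda" if torch.cuda.is_available() else "cpu"
model = nn.Linear(10, 2).to(device)
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scaler = torch.cuda.amp.GradScaler() if device == "cuda" else None
clip_grad_norm = 1.0  # stands in for args.clip_grad_norm

images = torch.randn(4, 10, device=device)
targets = torch.randint(0, 2, (4,), device=device)

optimizer.zero_grad()
with torch.cuda.amp.autocast(enabled=scaler is not None):
    loss = criterion(model(images), targets)

if scaler is not None:
    scaler.scale(loss).backward()
    if clip_grad_norm is not None:
        # Gradients must be unscaled before clipping so the max-norm
        # threshold is applied to the true gradient values.
        scaler.unscale_(optimizer)
        nn.utils.clip_grad_norm_(model.parameters(), clip_grad_norm)
    scaler.step(optimizer)
    scaler.update()
else:
    loss.backward()
    if clip_grad_norm is not None:
        nn.utils.clip_grad_norm_(model.parameters(), clip_grad_norm)
    optimizer.step()

The ordering in the mixed-precision branch matters: scaler.unscale_ runs before nn.utils.clip_grad_norm_ so that clipping sees unscaled gradients, which is why the hunk keeps the existing comment.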
references/classification/utils.py

@@ -409,11 +409,3 @@ def reduce_across_processes(val):
     dist.barrier()
     dist.all_reduce(t)
     return t
-
-
-def get_optimizer_params(optimizer):
-    """Generator to iterate over all parameters in the optimizer param_groups."""
-    for group in optimizer.param_groups:
-        for p in group["params"]:
-            yield p
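The utils.py deletion is the flip side of the change above: get_optimizer_params only re-enumerated tensors the optimizer had already been given. A small sanity check (illustrative only, with a hypothetical toy model; not part of the commit) shows why the two iterables cover the same parameter set whenever the optimizer is constructed over the full model.parameters():

import torch
import torch.nn as nn

model = nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# Walk the optimizer's param_groups the way the removed helper did, and
# compare tensor identities against model.parameters().
from_optimizer = {id(p) for group in optimizer.param_groups for p in group["params"]}
from_model = {id(p) for p in model.parameters()}
assert from_optimizer == from_model  # same tensors, so clipping either view is equivalent

If a script passed only a subset of parameters to the optimizer, the two views would differ; with the optimizer built from the full model.parameters(), clipping model.parameters() directly makes the helper redundant.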