OpenDAS / vision · commit 1de53bef (unverified)
Authored Nov 12, 2021 by Vasilis Vryniotis; committed via GitHub on Nov 12, 2021
Simplify the gradient clipping code. (#4896)
Parent: f676f940
Showing 2 changed files with 2 additions and 10 deletions (+2 / -10):

  references/classification/train.py   +2 / -2
  references/classification/utils.py   +0 / -8
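For reference, torch.nn.utils.clip_grad_norm_ accepts any iterable of parameters, computes the total gradient norm across all of them, and rescales the gradients in place when that norm exceeds max_norm. That is why the model's own parameter iterator can be passed directly, which is what makes the helper removed below unnecessary. A minimal self-contained sketch (the toy model and threshold are illustrative, not from the commit):

    import torch
    import torch.nn as nn

    model = nn.Linear(10, 2)
    loss = model(torch.randn(4, 10)).sum()
    loss.backward()

    # Rescales gradients in place so their total 2-norm is at most max_norm;
    # returns the norm measured before clipping.
    total_norm = nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)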
references/classification/train.py
@@ -40,13 +40,13 @@ def train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, arg
             if args.clip_grad_norm is not None:
                 # we should unscale the gradients of optimizer's assigned params if do gradient clipping
                 scaler.unscale_(optimizer)
-                nn.utils.clip_grad_norm_(utils.get_optimizer_params(optimizer), args.clip_grad_norm)
+                nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm)
             scaler.step(optimizer)
             scaler.update()
         else:
             loss.backward()
             if args.clip_grad_norm is not None:
-                nn.utils.clip_grad_norm_(utils.get_optimizer_params(optimizer), args.clip_grad_norm)
+                nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm)
             optimizer.step()

         if model_ema and i % args.model_ema_steps == 0:
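The inline comment in the hunk above is the key constraint here: with torch.cuda.amp, gradients remain multiplied by the loss-scale factor until scaler.unscale_(optimizer) is called, so clipping before unscaling would compare scaled gradient norms against the unscaled args.clip_grad_norm threshold. A hedged sketch of the required ordering (toy model and data; assumes a CUDA device is available):

    import torch
    import torch.nn as nn

    model = nn.Linear(10, 2).cuda()
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    scaler = torch.cuda.amp.GradScaler()

    x, y = torch.randn(8, 10, device="cuda"), torch.randn(8, 2, device="cuda")
    with torch.cuda.amp.autocast():
        loss = nn.functional.mse_loss(model(x), y)

    scaler.scale(loss).backward()  # gradients are scaled at this point
    scaler.unscale_(optimizer)     # restore true gradient magnitudes
    nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)  # clip true norms
    scaler.step(optimizer)         # skips the step if gradients are non-finite
    scaler.update()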
references/classification/utils.py
@@ -409,11 +409,3 @@ def reduce_across_processes(val)
     dist.barrier()
     dist.all_reduce(t)
     return t
-
-
-def get_optimizer_params(optimizer):
-    """Generator to iterate over all parameters in the optimizer param_groups."""
-    for group in optimizer.param_groups:
-        for p in group["params"]:
-            yield p
-
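The removed generator is interchangeable with model.parameters() only when the optimizer is built directly from the model's parameters, so that both iterate over exactly the same tensors; that appears to be the assumption behind this simplification. A quick hypothetical check of the equivalence (not part of the repository):

    import torch
    import torch.nn as nn

    model = nn.Linear(4, 4)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # Every tensor the optimizer will update is a tensor the model owns.
    opt_params = [p for g in optimizer.param_groups for p in g["params"]]
    assert set(map(id, opt_params)) == set(map(id, model.parameters()))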