OpenDAS / apex · Commits

Commit 137d822b
Authored Jun 08, 2018 by Christian Sarofeen

Better shortcut for shared_param = True

Parent: fb075b86

Showing 1 changed file with 16 additions and 14 deletions:
apex/parallel/distributed.py (+16, -14)
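For context on the flag being optimized: shared_param is the wrapper option for models that reuse the same parameter in more than one place. A minimal usage sketch follows; it is not part of the commit, TiedModel is a made-up example module, and the constructor call assumes the apex.parallel.DistributedDataParallel API of this period.

import torch
import torch.nn as nn
from apex.parallel import DistributedDataParallel

# Made-up module that applies one Linear layer twice, so its weight and
# bias each receive two gradient contributions in a single backward pass.
class TiedModel(nn.Module):
    def __init__(self):
        super(TiedModel, self).__init__()
        self.fc = nn.Linear(16, 16)

    def forward(self, x):
        return self.fc(self.fc(x))

# Assumes torch.distributed is already initialized, one process per GPU.
model = TiedModel().cuda()

# shared_param=True declares that parameter ordering cannot be trusted for
# gradient bucketing; after this commit that path also skips the t_record
# broadcast in allreduce_params (see the first hunk below).
model = DistributedDataParallel(model, shared_param=True)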
apex/parallel/distributed.py:

@@ -79,17 +79,19 @@ class DistributedDataParallel(Module):
     def create_hooks(self):
         #all reduce gradient hook
         def allreduce_params():
-            if(self.needs_reduction):
-                self.needs_reduction = False
-                self.needs_refresh = False
-            else:
-                return
-
-            grads = [param.grad.data for param in self.module.parameters() if param.grad is not None]
-            flat_dist_call(grads, dist.all_reduce)
-            t_record = torch.cuda.IntTensor(self.record)
-            dist.broadcast(t_record, 0)
-            self.record = [int(entry) for entry in t_record]
+            if not self.needs_reduction:
+                return
+            self.needs_reduction = False
+
+            #parameter ordering refresh
+            if self.needs_refresh and not self.shared_param:
+                t_record = torch.cuda.IntTensor(self.record)
+                dist.broadcast(t_record, 0)
+                self.record = [int(entry) for entry in t_record]
+                self.needs_refresh = False
+
+            grads = [param.grad.data for param in self.module.parameters() if param.grad is not None]
+            flat_dist_call(grads, dist.all_reduce)

         def flush_buckets():
             if not self.needs_reduction:

@@ -184,10 +186,10 @@ class DistributedDataParallel(Module):
         #Force needs_refresh to True if there are shared params
         #this will force it to always, only call flush_buckets which is safe
         #for shared parameters in the model.
-        if self.shared_param:
-            self.param_refs = []
-
-        self.needs_refresh = True if not self.param_refs else any(
-            [param1 is not param2 for param1, param2 in zip(param_list, self.param_refs)]
-        )
+        if not self.param_refs or self.shared_param:
+            self.needs_refresh = True
+        else:
+            self.needs_refresh = any(
+                [param1 is not param2 for param1, param2 in zip(param_list, self.param_refs)]
+            )
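The reworked shortcut is easiest to compare in isolation. Below is a standalone rendering of the before/after needs_refresh computation as hypothetical free functions (not apex code); both return the same value for every input, but the new form avoids clearing param_refs as a side channel and folds the test into one condition.

def needs_refresh_old(param_list, param_refs, shared_param):
    # Old shortcut: empty out param_refs so "not param_refs" is forced True.
    if shared_param:
        param_refs = []
    return True if not param_refs else any(
        [p1 is not p2 for p1, p2 in zip(param_list, param_refs)])

def needs_refresh_new(param_list, param_refs, shared_param):
    # New shortcut: test shared_param directly and leave param_refs alone.
    if not param_refs or shared_param:
        return True
    return any([p1 is not p2 for p1, p2 in zip(param_list, param_refs)])

In both versions the refresh is forced when shared_param is set or when no previous parameter list exists; otherwise it happens only if some parameter changed identity since the last iteration.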