OpenDAS / Megatron-LM

Commit b1ac9fd3
Authored May 05, 2020 by mohammad
Parent: f257d0ee

    tested and seems to be working
Showing 2 changed files with 12 additions and 3 deletions:

  megatron/arguments.py          +9 -1
  megatron/model/transformer.py  +3 -2
megatron/arguments.py

...
@@ -89,6 +89,14 @@ def parse_args(extra_args_provider=None, defaults={},
     assert args.min_lr <= args.lr
     if args.save is not None:
         assert args.save_interval is not None
+    # Parameters sharing does not work with torch DDP.
+    if (args.num_unique_layers is not None) and (args.num_layers is not None):
+        assert args.num_unique_layers <= args.num_layers
+        assert args.num_layers % args.num_unique_layers == 0, \
+            'num-layers should be divisible by num-unique-layers.'
+        if args.num_unique_layers < args.num_layers:
+            assert args.DDP_impl == 'local', \
+                'torch-DDP does not work with parameters sharing.'
     _print_args(args)
     return args
...
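As a standalone illustration, the sketch below reproduces the added validation outside of Megatron, using a plain argparse.Namespace in place of the parsed args. The attribute names come from the diff; the helper name check_param_sharing is hypothetical.

    # Minimal sketch of the validation added above (helper name is hypothetical).
    from argparse import Namespace

    def check_param_sharing(args):
        if (args.num_unique_layers is not None) and (args.num_layers is not None):
            assert args.num_unique_layers <= args.num_layers
            assert args.num_layers % args.num_unique_layers == 0, \
                'num-layers should be divisible by num-unique-layers.'
            if args.num_unique_layers < args.num_layers:
                # Sharing parameters across layers requires the local DDP implementation.
                assert args.DDP_impl == 'local', \
                    'torch-DDP does not work with parameters sharing.'

    # OK: 4 layers sharing 2 unique layers, local DDP.
    check_param_sharing(Namespace(num_layers=4, num_unique_layers=2, DDP_impl='local'))
    # Raises AssertionError: 3 does not divide 4 evenly.
    # check_param_sharing(Namespace(num_layers=4, num_unique_layers=3, DDP_impl='local'))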
@@ -120,7 +128,7 @@ def _add_network_size_args(parser):
                        help='Number of unique transformer layers. '
                        '`num-layers` should be divisible by this value.')
     group.add_argument('--param-sharing-style', default='grouped',
-                       choices=['grouped', 'space'],
+                       choices=['grouped', 'spaced'],
                        help='Ordering of the shared parameters. For example, '
                        'for a `num-layers`=4 and `--num-unique-layers`=2, '
                        'we will have the following ordering for two unique '
...
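To make the two orderings concrete, here is a hedged sketch of the layer-to-unique-layer mapping the flag plausibly selects. The name _get_layer_index appears in the transformer.py hunk below; this body is an illustrative reconstruction, not verbatim repository code.

    # Assumed semantics of --param-sharing-style (illustrative, not verbatim repo code).
    def layer_index(i, num_layers, num_unique_layers, style='grouped'):
        if style == 'grouped':
            # Cycle through the unique layers: 0, 1, 0, 1, ...
            return i % num_unique_layers
        if style == 'spaced':
            # Keep each unique layer in one contiguous block: 0, 0, 1, 1, ...
            return i // (num_layers // num_unique_layers)
        raise ValueError('unknown param sharing style: {}'.format(style))

    # With num-layers=4 and num-unique-layers=2, as in the help text:
    print([layer_index(i, 4, 2, 'grouped') for i in range(4)])  # [0, 1, 0, 1]
    print([layer_index(i, 4, 2, 'spaced') for i in range(4)])   # [0, 0, 1, 1]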
megatron/model/transformer.py

...
@@ -383,8 +383,9 @@ class ParallelTransformer(MegatronModule):
         if torch.distributed.get_rank() == 0:
             print('> will be using the following layer ordering:')
             for i in range(self.num_layers):
-                print('   layer: {:3d} --> unique layer: {:3d}'.format(
-                    i, self._get_layer_index(i)), flush=True)
+                print('   layer id: {:3d} --> unique layer id: '
+                      '{:3d}'.format(i, self._get_layer_index(i)),
+                      flush=True)

         # Final layer norm before output.
         self.final_layernorm = LayerNorm(
...
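For a sense of what rank 0 logs with the reworded format string, a small standalone sketch: the loop and format string mirror the diff, while the modulo mapping assumes the 'grouped' style sketched earlier.

    # Hypothetical standalone run of the new logging loop for 4 layers, 2 unique.
    num_layers, num_unique_layers = 4, 2
    print('> will be using the following layer ordering:')
    for i in range(num_layers):
        print('   layer id: {:3d} --> unique layer id: '
              '{:3d}'.format(i, i % num_unique_layers), flush=True)

Under those assumptions this prints:

    > will be using the following layer ordering:
       layer id:   0 --> unique layer id:   0
       layer id:   1 --> unique layer id:   1
       layer id:   2 --> unique layer id:   0
       layer id:   3 --> unique layer id:   1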