OpenDAS / Megatron-LM · Commits

Commit c2ea914f
authored Sep 23, 2022 by Jared Casper

Move layers from mpu to core.tensor_parallel.

parent 209f91c9
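
For downstream code the move is mechanical: helpers that were reached through megatron.mpu are now reached through megatron.core.tensor_parallel, as the two diffs below show. A minimal before/after sketch of hypothetical caller code (not part of this commit):

    from megatron import core

    def set_tp_defaults(model):
        # Old spelling (pre-commit):
        #   from megatron import mpu
        #   mpu.set_defaults_if_not_set_tensor_model_parallel_attributes(param)
        # New spelling, matching the diff in megatron/training.py below:
        for param in model.parameters():
            core.tensor_parallel.set_defaults_if_not_set_tensor_model_parallel_attributes(param)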
Changes: 22 files in this commit; showing 2 changed files with 3 additions and 2 deletions.

megatron/training.py  +2 -1
megatron/utils.py     +1 -1
megatron/training.py

@@ -20,6 +20,7 @@ from megatron import get_num_microbatches
 from megatron import is_last_rank
 from megatron import update_num_microbatches
 from megatron import mpu
+from megatron import core
 from megatron import print_rank_0
 from megatron import print_rank_last
 from megatron.checkpointing import load_checkpoint
@@ -257,7 +258,7 @@ def get_model(model_provider_func, model_type=ModelType.encoder_or_decoder, wrap
     # are set for all params so the optimizer can use them.
     for model_module in model:
         for param in model_module.parameters():
-            mpu.set_defaults_if_not_set_tensor_model_parallel_attributes(param)
+            core.tensor_parallel.set_defaults_if_not_set_tensor_model_parallel_attributes(param)

     # Print number of parameters.
     if mpu.get_data_parallel_rank() == 0:
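
For context, get_model stamps tensor-parallel metadata onto every parameter before the optimizer is built, so the optimizer can tell partitioned parameters from replicated ones. A rough sketch of the call's observable effect; the attribute names and default values below are an assumption based on Megatron-LM's parallel-attribute defaults, not part of this diff:

    import torch
    from megatron import core

    param = torch.nn.Parameter(torch.empty(4, 4))
    core.tensor_parallel.set_defaults_if_not_set_tensor_model_parallel_attributes(param)

    # Assumed defaults for a parameter that is not partitioned:
    #   param.tensor_model_parallel -> False
    #   param.partition_dim         -> -1
    #   param.partition_stride      -> 1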
megatron/utils.py

@@ -14,7 +14,7 @@ from megatron import get_args
 from megatron import get_adlr_autoresume
 from megatron import mpu
 from megatron.model.module import param_is_not_shared
-from megatron.mpu.layers import param_is_not_tensor_parallel_duplicate
+from megatron.core.tensor_parallel import param_is_not_tensor_parallel_duplicate


 def unwrap_model(model, module_instances=(torchDDP)):
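
Elsewhere in megatron/utils.py the relocated predicate is combined with param_is_not_shared so that each logical parameter is counted once, e.g. when computing a parameter norm across ranks. A hedged sketch of that filtering pattern (the helper name is illustrative, not the file's exact code):

    from megatron.core.tensor_parallel import param_is_not_tensor_parallel_duplicate
    from megatron.model.module import param_is_not_shared

    def params_for_norm(model):
        # Skip shared parameters and tensor-parallel duplicates so each
        # logical parameter contributes to the norm exactly once.
        return [
            p for p in model.parameters()
            if param_is_not_shared(p) and param_is_not_tensor_parallel_duplicate(p)
        ]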