OpenDAS / Megatron-LM · Commits · c30ba0f7

Commit c30ba0f7, authored Dec 08, 2020 by mohammad; committed by Deepak Narayanan, Dec 19, 2020.

Minor refactoring

Parent: feecd5d9
Showing 3 changed files with 4 additions and 10 deletions (+4, -10):
  megatron/arguments.py    (+2, -1)
  megatron/global_vars.py  (+1, -8)
  megatron/training.py     (+1, -1)
megatron/arguments.py

```diff
@@ -244,7 +244,8 @@ def _add_training_args(parser):
     group.add_argument('--global-batch-size', type=int, default=None,
                        help='Training batch size. If this value is None, then '
                        'use micro-batch-size * data-parallel-size as the '
-                       'global batch size')
+                       'global batch size. This choice will result in 1 for '
+                       'number of micro-batches.')
     group.add_argument('--rampup-batch-size', nargs='*', default=None,
                        help='Batch size ramp up with the following values:'
                        ' --rampup-batch-size <start batch size> '
...
```
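The updated help text pins down the relationship between the three batch-size knobs: leaving --global-batch-size as None defaults it to micro-batch-size * data-parallel-size, which by construction yields exactly one micro-batch per step. A minimal sketch of that arithmetic (the function names are illustrative, not Megatron-LM API):

```python
# Sketch of the batch-size arithmetic described in the help text above.
# Function names are illustrative, not part of Megatron-LM.

def infer_global_batch_size(micro_batch_size, data_parallel_size,
                            global_batch_size=None):
    """Default used when --global-batch-size is None."""
    if global_batch_size is None:
        return micro_batch_size * data_parallel_size
    return global_batch_size

def num_microbatches(global_batch_size, micro_batch_size, data_parallel_size):
    """Micro-batches accumulated per step on each data-parallel rank."""
    per_step = micro_batch_size * data_parallel_size
    assert global_batch_size % per_step == 0
    return global_batch_size // per_step

# The None default gives exactly 1 micro-batch, as the help text notes.
assert num_microbatches(infer_global_batch_size(4, 8), 4, 8) == 1
```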
megatron/global_vars.py

```diff
@@ -34,20 +34,12 @@ _GLOBAL_ADLR_AUTORESUME = None
 _GLOBAL_TIMERS = None
 
 
 def get_args():
     """Return arguments."""
     _ensure_var_is_initialized(_GLOBAL_ARGS, 'args')
     return _GLOBAL_ARGS
 
 
-def get_num_microbatches_calculator():
-    """Return num-microbatches calculator."""
-    _ensure_var_is_initialized(_GLOBAL_NUM_MICROBATCHES_CALCULATOR,
-                               'number of micro-batches calculator.')
-    return _GLOBAL_NUM_MICROBATCHES_CALCULATOR
-
-
 def get_num_microbatches():
     return _GLOBAL_NUM_MICROBATCHES_CALCULATOR.get()
...
@@ -141,6 +133,7 @@ class NumMicroBatchesCalculator(ABC):
     def get(self):
         pass
 
+    @abstractmethod
     def update(self, consumed_samples):
         pass
...
```
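With the calculator getter removed, callers interact only with get_num_microbatches(); the calculator object stays a private module-level singleton. A hedged sketch of that pattern, using a hypothetical constant-size calculator in place of Megatron-LM's real implementations:

```python
from abc import ABC, abstractmethod

# Illustrative stand-ins for the singleton-accessor pattern in
# megatron/global_vars.py; ConstantNumMicroBatches is hypothetical.

class NumMicroBatchesCalculator(ABC):
    """Interface sketched from the hunk above."""

    @abstractmethod
    def get(self):
        pass

    @abstractmethod
    def update(self, consumed_samples):
        pass


class ConstantNumMicroBatches(NumMicroBatchesCalculator):
    """Simplest possible calculator: a fixed number of micro-batches."""

    def __init__(self, global_batch_size, micro_batch_size,
                 data_parallel_size):
        self.num_micro_batches = global_batch_size // (
            micro_batch_size * data_parallel_size)

    def get(self):
        return self.num_micro_batches

    def update(self, consumed_samples):
        # Nothing to do: the number never changes.
        pass


_GLOBAL_NUM_MICROBATCHES_CALCULATOR = ConstantNumMicroBatches(512, 4, 8)


def get_num_microbatches():
    # Callers only ever see the current number, never the calculator.
    return _GLOBAL_NUM_MICROBATCHES_CALCULATOR.get()


assert get_num_microbatches() == 16  # 512 / (4 * 8)
```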
megatron/training.py

```diff
@@ -223,7 +223,7 @@ def setup_model_and_optimizer(model_provider_func):
     else:
         args.iteration = 0
 
-    # Wrap model for distributed training."""
+    # We only support local DDP with multiple micro-batches.
     if get_num_microbatches() > 1:
         assert args.DDP_impl == 'local'
...
```
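The corrected comment and the assert tie multiple micro-batches to the local DDP implementation, presumably because gradients must be accumulated locally across micro-batches before a single reduction. A rough sketch of the accumulation loop implied by get_num_microbatches() > 1 (all names illustrative, not Megatron-LM's actual train step):

```python
import torch

def train_step(model, optimizer, loss_fn, microbatch_iter, num_microbatches):
    """One optimizer step per global batch, accumulating gradients
    over num_microbatches forward/backward passes."""
    optimizer.zero_grad()
    for _ in range(num_microbatches):
        inputs, targets = next(microbatch_iter)
        loss = loss_fn(model(inputs), targets)
        # Scale so the accumulated gradient matches the full-batch average.
        (loss / num_microbatches).backward()
    optimizer.step()
```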