OpenDAS / deepspeed · Commit e6ac7311 (unverified)

Support initialization with dict configuration (#632)

Authored Jan 04, 2021 by Olatunji Ruwase; committed by GitHub on Jan 04, 2021.
Parent: 24e07399

Showing 3 changed files with 47 additions and 7 deletions:

  deepspeed/runtime/engine.py    +3 / -3
  tests/unit/simple_model.py     +8 / -3
  tests/unit/test_fp16.py        +36 / -1
deepspeed/runtime/engine.py

@@ -435,9 +435,9 @@ class DeepSpeedEngine(Module):

     # Configure based on command line arguments
     def _configure_with_arguments(self, args, mpu):
         self.local_rank = args.local_rank if hasattr(args, 'local_rank') else 0
-        self._config = DeepSpeedConfig(args.deepspeed_config, mpu, param_dict=self.config_params)
+        config_file = args.deepspeed_config if hasattr(args, 'deepspeed_config') else None
+        self._config = DeepSpeedConfig(config_file, mpu, param_dict=self.config_params)

     # Validate command line arguments
     def _do_args_sanity_check(self, args):
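The net effect in engine.py is that args.deepspeed_config becomes optional: when the attribute is missing, DeepSpeedConfig receives config_file=None and builds the configuration from param_dict alone. A minimal sketch of what this enables (the toy Linear model and AdamW optimizer below are illustrative assumptions, not part of the commit):

    import argparse
    import torch
    import deepspeed

    # Settings passed as a Python dict instead of a JSON file on disk.
    config_dict = {"train_batch_size": 1, "fp16": {"enabled": True}}

    parser = argparse.ArgumentParser()
    args = parser.parse_args(args='')
    args.deepspeed = True
    args.local_rank = 0
    # args.deepspeed_config is deliberately left unset; with this commit the
    # engine falls back to config_file=None and reads settings from the dict.

    model = torch.nn.Linear(10, 10)
    optimizer = torch.optim.AdamW(model.parameters())
    model_engine, optimizer, _, _ = deepspeed.initialize(args=args,
                                                         model=model,
                                                         optimizer=optimizer,
                                                         config_params=config_dict)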
tests/unit/simple_model.py

@@ -161,12 +161,10 @@ def create_config_from_dict(tmpdir, config_dict):
     return config_path

-def args_from_dict(tmpdir, config_dict):
-    config_path = create_config_from_dict(tmpdir, config_dict)
+def create_deepspeed_args():
     parser = argparse.ArgumentParser()
     args = parser.parse_args(args='')
     args.deepspeed = True
-    args.deepspeed_config = config_path
     if torch.distributed.is_initialized():
         # We assume up to one full node executing unit tests
         assert torch.distributed.get_world_size() <= torch.cuda.device_count()

@@ -174,3 +172,10 @@ def args_from_dict(tmpdir, config_dict):
     else:
         args.local_rank = 0
     return args

+def args_from_dict(tmpdir, config_dict):
+    args = create_deepspeed_args()
+    config_path = create_config_from_dict(tmpdir, config_dict)
+    args.deepspeed_config = config_path
+    return args
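The test-helper refactor splits argument construction out of args_from_dict, so dict-configured tests can get an argparse Namespace without writing a config file to disk, while file-based tests keep the old behavior. A brief usage sketch, assuming a pytest tmpdir fixture:

    # Dict-based path: no deepspeed_config attribute is set on args.
    args = create_deepspeed_args()

    # File-based path: the dict is serialized to JSON under tmpdir and
    # args.deepspeed_config points at that file.
    args = args_from_dict(tmpdir, {"train_batch_size": 1})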
tests/unit/test_fp16.py

@@ -6,7 +6,7 @@ import json
 import os
 from deepspeed.ops.adam import FusedAdam
 from common import distributed_test
-from simple_model import SimpleModel, SimpleOptimizer, random_dataloader, args_from_dict
+from simple_model import SimpleModel, SimpleOptimizer, random_dataloader, args_from_dict, create_deepspeed_args

 try:
     from apex import amp

@@ -194,6 +194,41 @@ def test_adamw_fp16_basic(tmpdir):
     _test_adamw_fp16_basic(args=args, model=model, hidden_dim=hidden_dim)

+def test_dict_config_adamw_fp16_basic():
+    config_dict = {"train_batch_size": 1, "steps_per_print": 1, "fp16": {"enabled": True}}
+    args = create_deepspeed_args()
+    hidden_dim = 10
+
+    model = SimpleModel(hidden_dim, empty_grad=False)
+
+    @distributed_test(world_size=[1])
+    def _test_adamw_fp16_basic(args, model, hidden_dim, config_dict):
+        optimizer = torch.optim.AdamW(params=model.parameters())
+        model, _, _, _ = deepspeed.initialize(args=args,
+                                              model=model,
+                                              optimizer=optimizer,
+                                              config_params=config_dict)
+        data_loader = random_dataloader(model=model,
+                                        total_samples=50,
+                                        hidden_dim=hidden_dim,
+                                        device=model.device)
+        for n, batch in enumerate(data_loader):
+            loss = model(batch[0], batch[1])
+            model.backward(loss)
+            model.step()
+
+    _test_adamw_fp16_basic(args=args,
+                           model=model,
+                           hidden_dim=hidden_dim,
+                           config_dict=config_dict)

 def test_adamw_fp16_empty_grad(tmpdir):
     config_dict = {"train_batch_size": 1,
 ...
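Unlike test_adamw_fp16_basic, the new test takes no tmpdir fixture, since no config file is written to disk. To run just this test (assuming a CUDA-capable machine with DeepSpeed installed), something like:

    pytest tests/unit/test_fp16.py::test_dict_config_adamw_fp16_basic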