OpenDAS / ColossalAI · Commit 30aee9c4 (unverified)

Authored Feb 15, 2023 by binmakeswell; committed via GitHub on Feb 15, 2023
Parents: 1dc003c1, 93b788b9

[NFC] polish code format
Showing 7 changed files with 172 additions and 165 deletions.
colossalai/cli/cli.py (+3 −2)
colossalai/cli/launcher/__init__.py (+3 −1)
colossalai/context/moe_context.py (+129 −129)
colossalai/context/process_group_initializer/initializer_2d.py (+3 −2)
colossalai/context/process_group_initializer/initializer_pipeline.py (+2 −1)
colossalai/context/process_group_initializer/initializer_sequence.py (+2 −1)
colossalai/engine/gradient_handler/utils.py (+30 −29)
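Every file in this commit receives the same treatment: imports are regrouped and alphabetized rather than functionally changed (hence the [NFC] tag). The resulting order matches the convention that tools such as isort enforce; the commit does not say which tool was run, so take the tool attribution as an assumption. The target layout, illustrated with names that appear in the diffs below:

# Sketch of the import grouping this commit converges on.
from typing import Tuple                                   # 1. standard library

import torch                                               # 2. third-party
import torch.distributed as dist

from colossalai.registry import DIST_GROUP_INITIALIZER     # 3. first-party

from ..parallel_mode import ParallelMode                   # 4. local (relative)
from .process_group_initializer import ProcessGroupInitializer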
colossalai/cli/cli.py

 import click
+
-from .launcher import run
-from .check import check
 from .benchmark import benchmark
+from .check import check
+from .launcher import run


 class Arguments():
 ...
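The imports above feed a click command group. As a rough reconstruction of how cli.py presumably registers the three subcommands (the group function name and its body are assumptions, not shown in this diff):

import click

from .benchmark import benchmark
from .check import check
from .launcher import run


@click.group()
def cli():
    # Hypothetical top-level entry point; the real cli.py may differ.
    pass


# Attach the imported subcommands so run/check/benchmark resolve as
# `colossalai run`, `colossalai check`, `colossalai benchmark`.
cli.add_command(run)
cli.add_command(check)
cli.add_command(benchmark)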
colossalai/cli/launcher/__init__.py

 import click
+
-from .run import launch_multi_processes
 from colossalai.context import Config
+
+from .run import launch_multi_processes


 @click.command(help="Launch distributed training on a single node or multiple nodes",
                context_settings=dict(ignore_unknown_options=True))
 ...
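The context_settings=dict(ignore_unknown_options=True) shown in the context lines tells click not to reject flags it does not recognize, which is what lets a launcher forward arbitrary arguments to the training script. A minimal sketch of that pattern (the command and parameter names here are hypothetical):

import click


@click.command(context_settings=dict(ignore_unknown_options=True))
@click.argument("script_args", nargs=-1, type=click.UNPROCESSED)
def run(script_args):
    # Unknown options such as --lr 0.1 land in script_args untouched
    # instead of triggering a "no such option" error.
    click.echo(f"forwarding: {' '.join(script_args)}")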
colossalai/context/moe_context.py

+from typing import Tuple
+
 import torch
 import torch.distributed as dist
 ...
@@ -5,8 +7,6 @@ from colossalai.context.parallel_mode import ParallelMode
 from colossalai.context.singleton_meta import SingletonMeta
 from colossalai.tensor import ProcessGroup
-from typing import Tuple
-


 def _check_sanity():
     from colossalai.core import global_context as gpc
 ...
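moe_context.py imports SingletonMeta, which suggests the MoE context object is a process-wide singleton. A common implementation of such a metaclass looks like the sketch below; this is an assumption about colossalai's version, not a copy of it:

class SingletonMeta(type):
    # Cache one instance per class; repeated constructor calls return
    # the same object.
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class MoeContext(metaclass=SingletonMeta):
    # Hypothetical user of the metaclass, mirroring how moe_context.py
    # presumably declares its context class.
    pass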
colossalai/context/process_group_initializer/initializer_2d.py

@@ -2,10 +2,11 @@ import math
 import torch.distributed as dist

+from colossalai.global_variables import tensor_parallel_env as env
 from colossalai.registry import DIST_GROUP_INITIALIZER
-from .process_group_initializer import ProcessGroupInitializer
+
 from ..parallel_mode import ParallelMode
-from colossalai.global_variables import tensor_parallel_env as env
+from .process_group_initializer import ProcessGroupInitializer


 def _check_summa_env_var(summa_dim):
 ...
colossalai/context/process_group_initializer/initializer_pipeline.py

@@ -4,8 +4,9 @@
 from torch import distributed as dist

 from colossalai.registry import DIST_GROUP_INITIALIZER
-from .process_group_initializer import ProcessGroupInitializer
+
 from ..parallel_mode import ParallelMode
+from .process_group_initializer import ProcessGroupInitializer


 @DIST_GROUP_INITIALIZER.register_module
 ...
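The @DIST_GROUP_INITIALIZER.register_module decorator seen here (and in initializer_sequence.py below) points at a registry keyed by class name. A generic sketch of that pattern, not ColossalAI's actual Registry implementation:

class Registry:
    def __init__(self, name):
        self.name = name
        self._modules = {}

    def register_module(self, module_class):
        # Used as a bare decorator: store the class under its own name
        # and return it unchanged.
        self._modules[module_class.__name__] = module_class
        return module_class

    def get_module(self, module_name):
        return self._modules[module_name]


DIST_GROUP_INITIALIZER = Registry("dist_group_initializer")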
colossalai/context/process_group_initializer/initializer_sequence.py

@@ -3,9 +3,10 @@
 import torch.distributed as dist

 from colossalai.registry import DIST_GROUP_INITIALIZER
+
+from ..parallel_mode import ParallelMode
 from .initializer_tensor import Initializer_Tensor
 from .process_group_initializer import ProcessGroupInitializer
-from ..parallel_mode import ParallelMode


 @DIST_GROUP_INITIALIZER.register_module
 ...
colossalai/engine/gradient_handler/utils.py

+from typing import Iterable
+
 import torch.distributed as dist
 import torch.nn as nn
 from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
-from typing import Iterable


 def bucket_allreduce(param_list: Iterable[nn.Parameter], group=None):
 ...
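The function touched here, bucket_allreduce, flattens many parameter gradients into one buffer so a single collective replaces per-tensor all-reduces. Its body is not shown in this diff; the sketch below reconstructs the usual flatten/all-reduce/unflatten pattern from the imports and signature, so treat the details (dtype bucketing, averaging) as assumptions:

from typing import Iterable

import torch.distributed as dist
import torch.nn as nn
from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors


def bucket_allreduce(param_list: Iterable[nn.Parameter], group=None):
    # Group gradients by dtype so each flattened buffer is homogeneous.
    buckets = {}
    for param in param_list:
        if param.requires_grad and param.grad is not None:
            buckets.setdefault(param.grad.dtype, []).append(param.grad.data)

    for grads in buckets.values():
        flat = _flatten_dense_tensors(grads)          # one contiguous buffer
        dist.all_reduce(flat, group=group)            # single collective (SUM)
        flat.div_(dist.get_world_size(group=group))   # average across ranks
        for grad, synced in zip(grads, _unflatten_dense_tensors(flat, grads)):
            grad.copy_(synced)                        # write results back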