OpenDAS / ColossalAI, commit a445e118 (unverified)

[polish] polish singleton and global context (#500)

Authored by Jiarui Fang on Mar 23, 2022; committed via GitHub on Mar 23, 2022.
Parent: 9ec1ce6a

Showing 18 changed files with 39 additions and 47 deletions (+39 −47).

In short, this commit replaces the hand-rolled singleton machinery in ParallelContext and MoeContext (a private __instance attribute plus a get_instance() static method) with the SingletonMeta metaclass, moves singleton_meta.py from colossalai/utils/commons/ to colossalai/context/, and exposes the two singletons as module-level instances: global_context in colossalai/context/parallel_context.py and MOE_CONTEXT in colossalai/context/moe_context.py. Call sites that previously pulled MOE_CONTEXT from colossalai.core now import it from colossalai.context.moe_context.
Changed files:

  colossalai/context/__init__.py                                +1  −1
  colossalai/context/moe_context.py                             +8  −9
  colossalai/context/parallel_context.py                        +5  −16
  colossalai/context/singleton_meta.py (moved)                  +0  −0
  colossalai/core.py                                            +1  −4
  colossalai/engine/gradient_handler/_moe_gradient_handler.py   +2  −1
  colossalai/initialize.py                                      +3  −1
  colossalai/nn/layer/moe/experts.py                            +1  −1
  colossalai/nn/layer/moe/layers.py                             +1  −1
  colossalai/nn/layer/moe/utils.py                              +1  −1
  colossalai/nn/loss/loss_moe.py                                +1  −1
  colossalai/utils/memory_tracer/model_data_memtracer.py        +1  −1
  colossalai/utils/moe.py                                       +2  −1
  model_zoo/moe/models.py                                       +1  −1
  tests/test_amp/test_naive_fp16.py                             +8  −5
  tests/test_moe/test_grad_handler.py                           +1  −1
  tests/test_moe/test_kernel.py                                 +1  −1
  tests/test_moe/test_moe_group.py                              +1  −1
colossalai/context/__init__.py:

  from .config import Config, ConfigException
  from .parallel_context import ParallelContext
- from .moe_context import MoeContext
  from .parallel_mode import ParallelMode
+ from .moe_context import MOE_CONTEXT
  from .process_group_initializer import *
  from .random import *
colossalai/context/moe_context.py:

  import torch
  import torch.distributed as dist
- from .parallel_mode import ParallelMode
+ from colossalai.context.parallel_mode import ParallelMode
+ from colossalai.context.singleton_meta import SingletonMeta
  from typing import Tuple
  ...
@@ -56,17 +59,10 @@ class MoeParallelInfo:
          self.dp_group = group


- class MoeContext:
+ class MoeContext(metaclass=SingletonMeta):
      """MoE parallel context manager. This class manages different
      parallel groups in MoE context and MoE loss in training.
      """
-     __instance = None
-
-     @staticmethod
-     def get_instance():
-         if MoeContext.__instance is None:
-             MoeContext.__instance = MoeContext()
-         return MoeContext.__instance

      def __init__(self):
          self.world_size = 1
  ...
@@ -160,3 +156,6 @@ class MoeContext:
      def get_loss(self):
          return self.aux_loss
+
+
+ MOE_CONTEXT = MoeContext()
colossalai/context/parallel_context.py:

  ...
@@ -15,30 +15,16 @@ from colossalai.registry import DIST_GROUP_INITIALIZER
  from .parallel_mode import ParallelMode
  from .random import add_seed, get_seeds, set_mode
+ from colossalai.context.singleton_meta import SingletonMeta


- class ParallelContext:
+ class ParallelContext(metaclass=SingletonMeta):
      """This class provides interface functions for users to get the parallel context,
      such as the global rank, the local rank, the world size, etc. of each device.
      """
-     __instance = None
-
-     @staticmethod
-     def get_instance():
-         if ParallelContext.__instance is None:
-             ParallelContext()
-         return ParallelContext.__instance

      def __init__(self):
-         # create a singleton instance
-         if ParallelContext.__instance is not None:
-             raise Exception(
-                 'ParallelContext is a singleton class, you should get the instance by colossalai.core.global_context')
-         else:
-             ParallelContext.__instance = self
          # distributed settings
          self._global_ranks = dict()
          self._local_ranks = dict()
  ...
@@ -510,3 +496,6 @@ class ParallelContext:
      def set_virtual_pipeline_parallel_rank(self, rank):
          self.virtual_pipeline_parallel_rank = rank
+
+
+ global_context = ParallelContext()
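A note in passing (an illustration, not part of the diff, assuming SingletonMeta caches the first instance of each class): after this change, constructing ParallelContext() anywhere in the process returns the module-level instance, which is what makes the global_context assignment above safe:

    from colossalai.context import ParallelContext
    from colossalai.context.parallel_context import global_context

    # SingletonMeta intercepts the constructor call, so no second context
    # is built; the instance created at import time is returned instead.
    assert ParallelContext() is global_context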
colossalai/utils/commons/singleton_meta.py → colossalai/context/singleton_meta.py:

  File moved (+0 −0)
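The diff itself does not show the moved file's contents. For readers unfamiliar with the pattern, a minimal SingletonMeta along the following lines would provide the behavior the two context classes now rely on; this is a sketch under that assumption, not necessarily ColossalAI's exact implementation:

    class SingletonMeta(type):
        """Metaclass that creates at most one instance per class.

        The first call to SomeClass(...) constructs and caches the
        instance; every later call returns the cached one.
        """

        _instances = {}

        def __call__(cls, *args, **kwargs):
            if cls not in cls._instances:
                cls._instances[cls] = super().__call__(*args, **kwargs)
            return cls._instances[cls]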
colossalai/core.py:

  #!/usr/bin/env python
  # -*- encoding: utf-8 -*-

- from colossalai.context import ParallelContext, MoeContext
-
- global_context = ParallelContext.get_instance()
- MOE_CONTEXT = MoeContext.get_instance()
+ from colossalai.context.parallel_context import global_context
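With this, colossalai.core shrinks to a re-export of global_context, so the widespread gpc alias keeps working unchanged, while MOE_CONTEXT must now be imported from its defining module. Both forms appear verbatim in the call-site diffs below:

    # unchanged by this commit
    from colossalai.core import global_context as gpc

    # new import path for the MoE context singleton
    from colossalai.context.moe_context import MOE_CONTEXT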
colossalai/engine/gradient_handler/_moe_gradient_handler.py:

- from colossalai.core import global_context as gpc, MOE_CONTEXT
+ from colossalai.core import global_context as gpc
  from colossalai.registry import GRADIENT_HANDLER
  from colossalai.utils.moe import get_moe_epsize_param_dict
  from ._base_gradient_handler import BaseGradientHandler
  from ...context.parallel_mode import ParallelMode
  from .utils import bucket_allreduce
+ from colossalai.context.moe_context import MOE_CONTEXT


  @GRADIENT_HANDLER.register_module
  ...
colossalai/initialize.py:

  ...
@@ -19,7 +19,9 @@ from colossalai.amp import AMP_TYPE, convert_to_amp
  from colossalai.amp.naive_amp import NaiveAMPModel
  from colossalai.builder.builder import build_gradient_handler
  from colossalai.context import Config, ConfigException, ParallelMode
- from colossalai.core import global_context as gpc, MOE_CONTEXT
+ from colossalai.core import global_context as gpc
+ from colossalai.context.moe_context import MOE_CONTEXT
  from colossalai.engine import Engine
  from colossalai.engine.ophooks import BaseOpHook
  from colossalai.logging import get_dist_logger
  ...
colossalai/nn/layer/moe/experts.py:

@@ -4,7 +4,7 @@ import torch
  import torch.nn as nn
  from colossalai.context import ParallelMode, seed
  from colossalai.utils import get_current_device
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT
  from typing import Type
  ...
colossalai/nn/layer/moe/layers.py:

@@ -4,7 +4,7 @@ import torch
  import torch.nn as nn
  import torch.nn.functional as F
  import torch.distributed as dist
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT
  from colossalai.utils import get_current_device
  from ._operation import COL_MOE_KERNEL_FLAG, AllToAll, AllGather, ReduceScatter, MoeDispatch, MoeCombine, moe_cumsum
  from .experts import MoeExperts, Experts
  ...
colossalai/nn/layer/moe/utils.py:

  import torch
  from colossalai.utils import get_current_device
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT
  from .experts import FFNExperts, TPExperts
  ...
colossalai/nn/loss/loss_moe.py:

  import torch.nn as nn
  from colossalai.registry import LOSSES
  from torch.nn.modules.loss import _Loss
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT


  @LOSSES.register_module
  ...
colossalai/utils/memory_tracer/model_data_memtracer.py:

- from colossalai.utils.commons.singleton_meta import SingletonMeta
+ from colossalai.context.singleton_meta import SingletonMeta
  from colossalai.utils.memory_tracer.commons import col_tensor_mem_usage
  import torch
  ...
colossalai/utils/moe.py:

  import torch.nn as nn
  import torch.distributed as dist
- from colossalai.core import global_context as gpc, MOE_CONTEXT
+ from colossalai.core import global_context as gpc
+ from colossalai.context.moe_context import MOE_CONTEXT
  from colossalai.context import ParallelMode
  from .common import is_using_ddp
  from typing import Dict, List
  ...
model_zoo/moe/models.py:

@@ -7,7 +7,7 @@ from colossalai.nn.layer import VanillaPatchEmbedding, VanillaClassifier, \
  from colossalai.nn.layer.moe import build_ffn_experts, MoeLayer, Top2Router, NormalNoiseGenerator, MoeModule
  from .util import moe_sa_args, moe_mlp_args
  from ..helper import TransformerLayer
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT
  from colossalai.utils import get_current_device
  from typing import List
  ...
tests/test_amp/test_naive_fp16.py:

  import torch
- import colossalai
+ import copy
+ import pytest
  import torch.multiprocessing as mp
+ from colossalai.amp import convert_to_naive_amp, convert_to_apex_amp
+ from tests.components_to_test.registry import non_distributed_component_funcs
+ import colossalai
  from colossalai.testing import assert_close_loose
  from colossalai.utils import free_port
- from colossalai.amp import convert_to_naive_amp, convert_to_apex_amp
- from tests.components_to_test.registry import non_distributed_component_funcs
- import copy
- import pytest
  from functools import partial
  ...
tests/test_moe/test_grad_handler.py:

@@ -7,7 +7,7 @@ import torch.distributed as dist
  import colossalai
  from colossalai.utils import free_port, get_current_device
  from colossalai.nn.layer.moe import Top1Router, UniformNoiseGenerator, MoeLayer, Experts
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT
  from colossalai.utils.moe import sync_moe_model_param
  from colossalai.engine.gradient_handler import MoeGradientHandler
  from colossalai.testing import assert_equal_in_group
  ...
tests/test_moe/test_kernel.py:

@@ -8,7 +8,7 @@ from colossalai.context import ParallelMode
  from colossalai.core import global_context as gpc
  from colossalai.utils import free_port, get_current_device
  from colossalai.nn.layer.moe import Top1Router, Top2Router, MoeLayer, Experts
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT

  BATCH_SIZE = 16
  NUM_EXPERTS = 4
  ...
tests/test_moe/test_moe_group.py:

@@ -6,7 +6,7 @@ import torch.distributed as dist
  import colossalai
  from colossalai.utils import free_port, get_current_device
  from colossalai.nn.layer.moe import Experts
- from colossalai.core import MOE_CONTEXT
+ from colossalai.context.moe_context import MOE_CONTEXT
  from colossalai.utils.moe import sync_moe_model_param
  from colossalai.testing import assert_equal_in_group
  ...