OpenDAS / ColossalAI · commit 10ef8afd

[gemini] init genimi individual directory (#754)

Unverified commit, authored Apr 14, 2022 by Jiarui Fang and committed via GitHub on Apr 14, 2022.
Parent commit: dcca614e
Showing 8 changed files with 14 additions and 11 deletions (+14 −11):

colossalai/gemini/__init__.py                       +4 −0
colossalai/gemini/stateful_tensor_mgr.py            +1 −1
colossalai/gemini/tensor_placement_policy.py        +0 −0
colossalai/zero/sharded_model/sharded_model_v2.py   +2 −2
colossalai/zero/sharded_optim/sharded_optim_v2.py   +1 −1
colossalai/zero/utils/__init__.py                   +1 −3
colossalai/zero/utils/zero_hook.py                  +1 −2
tests/test_zero/test_stateful_tensor_mgr.py         +4 −2
colossalai/gemini/__init__.py  (new file, mode 100644)

+from .stateful_tensor_mgr import StatefulTensorMgr
+from .tensor_placement_policy import TensorPlacementPolicyFactory
+
+__all__ = ['StatefulTensorMgr', 'TensorPlacementPolicyFactory']
\ No newline at end of file
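With this new `__init__.py`, both symbols become importable straight from the package root. A minimal sketch of the resulting import surface, using only the two names the file re-exports:

```python
# Both names re-exported by colossalai/gemini/__init__.py resolve from the
# package root after this commit.
from colossalai.gemini import StatefulTensorMgr, TensorPlacementPolicyFactory

# The fully qualified module paths introduced by this commit work as well.
from colossalai.gemini.stateful_tensor_mgr import StatefulTensorMgr
from colossalai.gemini.tensor_placement_policy import TensorPlacementPolicyFactory
```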
colossalai/zero/utils/stateful_tensor_mgr.py → colossalai/gemini/stateful_tensor_mgr.py  (renamed, +1 −1)

@@ -5,7 +5,7 @@ from colossalai.utils.cuda import get_current_device
 from colossalai.zero.sharded_param.sharded_param import ShardedParamV2
 from colossalai.zero.sharded_param.tensorful_state import StatefulTensor, TensorState
 from colossalai.zero.sharded_param.tensor_utils import colo_model_data_tensor_move_inline, colo_tensor_mem_usage
-from colossalai.zero.utils.tensor_placement_policy import TensorPlacementPolicy
+from colossalai.gemini.tensor_placement_policy import TensorPlacementPolicy
 from typing import List
 from colossalai.logging import get_dist_logger
colossalai/zero/utils/tensor_placement_policy.py → colossalai/gemini/tensor_placement_policy.py  (file moved, no content changes)
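Because the module is moved rather than copied, the old path `colossalai.zero.utils.tensor_placement_policy` ceases to exist after this commit. A hypothetical deprecation shim, not part of this commit, could have kept old imports alive during a transition window:

```python
# HYPOTHETICAL shim at colossalai/zero/utils/tensor_placement_policy.py.
# This commit does NOT include such a file; it only illustrates how the
# rename could have been softened for downstream users.
import warnings

# Re-export everything from the new location so legacy imports keep working.
from colossalai.gemini.tensor_placement_policy import *  # noqa: F401,F403

warnings.warn(
    "colossalai.zero.utils.tensor_placement_policy has moved to "
    "colossalai.gemini.tensor_placement_policy; update your imports.",
    DeprecationWarning,
    stacklevel=2,
)
```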
colossalai/zero/sharded_model/sharded_model_v2.py  (+2 −2)

@@ -22,8 +22,8 @@ from colossalai.zero.sharded_model.reduce_scatter import ReduceScatterBucketer
 from colossalai.zero.sharded_param.tensorful_state import TensorState
 from torch.distributed import ProcessGroup
 from torch.nn.parameter import Parameter
-from colossalai.zero.utils.stateful_tensor_mgr import StatefulTensorMgr
-from colossalai.zero.utils.tensor_placement_policy import TensorPlacementPolicyFactory, TensorPlacementPolicy
+from colossalai.gemini.stateful_tensor_mgr import StatefulTensorMgr
+from colossalai.gemini.tensor_placement_policy import TensorPlacementPolicyFactory, TensorPlacementPolicy
 from ._utils import (cast_float_arguments, cast_tensor_to_fp16, cast_tensor_to_fp32, chunk_and_pad, free_storage,
                      get_gradient_predivide_factor)
colossalai/zero/sharded_optim/sharded_optim_v2.py  (+1 −1)

@@ -21,7 +21,7 @@ from torch import Tensor
 from torch.distributed import ProcessGroup
 from torch.nn.parameter import Parameter
 from torch.optim import Optimizer
-from colossalai.zero.utils.tensor_placement_policy import AutoTensorPlacementPolicy
+from colossalai.gemini.tensor_placement_policy import AutoTensorPlacementPolicy

 class OptimState(Enum):
colossalai/zero/utils/__init__.py  (+1 −3)

-from .stateful_tensor_mgr import StatefulTensorMgr
-from .tensor_placement_policy import TensorPlacementPolicyFactory
 from .zero_hook import ZeroHook
-__all__ = ['StatefulTensorMgr', 'ZeroHook', 'TensorPlacementPolicyFactory']
+__all__ = ['ZeroHook']
\ No newline at end of file
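For downstream code this file is the breaking edge of the commit: `colossalai.zero.utils` now exports only `ZeroHook`. The required migration, read directly off the import changes elsewhere in this diff:

```python
# Before this commit (these now raise ImportError):
# from colossalai.zero.utils import StatefulTensorMgr
# from colossalai.zero.utils.tensor_placement_policy import AutoTensorPlacementPolicy

# After this commit:
from colossalai.gemini import StatefulTensorMgr
from colossalai.gemini.tensor_placement_policy import AutoTensorPlacementPolicy

# ZeroHook is the only symbol still exported from colossalai.zero.utils.
from colossalai.zero.utils import ZeroHook
```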
colossalai/zero/utils/zero_hook.py  (+1 −2)

@@ -9,8 +9,7 @@ from colossalai.utils.memory_tracer.memstats_collector import MemStatsCollector
 from colossalai.zero.shard_utils import BaseShardStrategy
 from colossalai.zero.sharded_param.tensorful_state import TensorState
-from colossalai.zero.utils.stateful_tensor_mgr import StatefulTensorMgr
-from colossalai.zero.sharded_param.tensor_utils import colo_model_data_tensor_move_inline
+from colossalai.gemini.stateful_tensor_mgr import StatefulTensorMgr
 from colossalai.engine.ophooks import BaseOpHook
tests/test_zero/test_stateful_tensor_mgr.py  (+4 −2)

@@ -6,7 +6,7 @@ from colossalai.utils.cuda import get_current_device
 from colossalai.utils.memory_tracer import MemStatsCollector
 from colossalai.utils.memory_tracer.model_data_memtracer import GLOBAL_MODEL_DATA_TRACER
 from colossalai.utils.memory import colo_set_process_memory_fraction
-from colossalai.zero.utils import StatefulTensorMgr
+from colossalai.gemini import StatefulTensorMgr
 from colossalai.zero.sharded_param.sharded_param import ShardedParamV2
 from colossalai.zero.sharded_param.tensorful_state import TensorState
 from colossalai.utils import free_port
@@ -14,7 +14,9 @@ from colossalai.testing import rerun_on_exception
 from torch.nn.parameter import Parameter
 from typing import List
 from functools import partial
-from colossalai.zero.utils.tensor_placement_policy import AutoTensorPlacementPolicy
+from colossalai.gemini import StatefulTensorMgr
+from colossalai.gemini.tensor_placement_policy import AutoTensorPlacementPolicy

 class Net(torch.nn.Module):
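Code that must run against ColossalAI checkouts from both sides of this commit can bridge the two layouts with an import fallback. A sketch of that common pattern, assuming nothing beyond the module paths shown in this diff (it is not part of the commit itself):

```python
# Version-tolerant imports: prefer the post-#754 gemini layout, fall back to
# the pre-#754 zero.utils layout. Illustration only.
try:
    from colossalai.gemini import StatefulTensorMgr
    from colossalai.gemini.tensor_placement_policy import AutoTensorPlacementPolicy
except ImportError:
    from colossalai.zero.utils import StatefulTensorMgr
    from colossalai.zero.utils.tensor_placement_policy import AutoTensorPlacementPolicy
```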