Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
ColossalAI
Commits
c6930d8d
Unverified
Commit
c6930d8d
authored
Apr 24, 2022
by
YuliangLiu0306
Committed by
GitHub
Apr 24, 2022
Browse files
[pipelinable]use ColoTensor to replace dummy tensor. (#853)
parent
bcc86550
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
26 additions
and
2 deletions
+26
-2
colossalai/tensor/colo_tensor.py
colossalai/tensor/colo_tensor.py
+16
-0
colossalai/utils/model/pipelinable.py
colossalai/utils/model/pipelinable.py
+10
-2
No files found.
colossalai/tensor/colo_tensor.py
View file @
c6930d8d
...
...
@@ -53,6 +53,22 @@ class ColoTensor(object):
def size(self):
    """Return the raw size record kept for this tensor.

    Returns:
        The stored ``_size`` metadata (dimension extents) as-is.
    """
    stored_size = self._size
    return stored_size
@property
def shape(self):
    """Expose the tensor's dimension extents as a ``torch.Size``.

    Returns:
        torch.Size: the stored ``_size`` metadata wrapped in ``torch.Size``.
    """
    dims = self._size
    return torch.Size(dims)
def size(self, dim=None):
    """Return the full shape, or the extent along one dimension.

    Args:
        dim: optional dimension index; when omitted the whole shape
            (``self.shape``) is returned.

    Returns:
        ``self.shape`` when ``dim`` is None, otherwise ``self._size[dim]``.
    """
    if dim is not None:
        return self._size[dim]
    return self.shape
def dim(self):
    """Return the number of dimensions (rank) of the tensor.

    Returns:
        int: the length of the stored size metadata.
    """
    dims = self._size
    return len(dims)
def normal_(self, mean=0., std=1.):
    """Fill the underlying torch tensor in place from a normal distribution.

    Args:
        mean: mean of the normal distribution (default 0.).
        std: standard deviation of the distribution (default 1.).

    Returns:
        The underlying torch tensor, after ``Tensor.normal_`` has filled it
        in place with samples drawn from N(mean, std).
    """
    underlying = self.torch_tensor()
    return underlying.normal_(mean=mean, std=std)
def numel(self):
    """Return the total number of elements in the tensor.

    Returns:
        The product of all dimension extents in the stored size metadata.
    """
    # NOTE(review): `product` is presumably imported elsewhere in this
    # module — TODO confirm it is in scope at file level.
    dims = self._size
    return product(dims)
...
...
colossalai/utils/model/pipelinable.py
View file @
c6930d8d
...
...
@@ -3,6 +3,7 @@ import functools
from
colossalai.utils.model.utils
import
_substitute_init_recursively
,
InsertPostInitMethodToModuleSubClasses
,
call_to_str
from
colossalai.builder.pipeline
import
partition_uniform
,
partition_balanced
from
colossalai.core
import
global_context
as
gpc
from
colossalai.tensor
import
ColoTensor
class
PipelinableContext
(
InsertPostInitMethodToModuleSubClasses
):
...
...
@@ -64,8 +65,15 @@ class PipelinableContext(InsertPostInitMethodToModuleSubClasses):
layer_spec
=
LayerSpec
(
module
.
__class__
,
*
modified_args
,
**
kwargs
)
layer_spec
.
set_children
(
module
.
children
())
self
.
_layer_spec_dict
[
module_id
]
=
layer_spec
for
param
in
module
.
parameters
(
recurse
=
False
):
param
.
data
=
torch
.
rand
(
1
,
1
)
name_list
=
[]
for
name
,
param
in
module
.
named_parameters
():
if
isinstance
(
param
,
ColoTensor
):
continue
name_list
.
append
((
name
,
param
))
for
name
,
param
in
name_list
:
delattr
(
module
,
name
)
setattr
(
module
,
name
,
ColoTensor
.
init_from_torch_tensor
(
tensor
=
param
,
save_payload
=
False
))
def
to_layer_list
(
self
,
exec_seq
=
None
):
"""
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment