Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
ColossalAI
Commits
aafc3516
Commit
aafc3516
authored
Jan 09, 2023
by
oahzxl
Browse files
add available
parent
d5c4f0bf
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
4 additions
and
0 deletions
+4
-0
tests/test_autochunk/test_autochunk_codegen.py
tests/test_autochunk/test_autochunk_codegen.py
+2
-0
tests/test_autochunk/test_autochunk_search.py
tests/test_autochunk/test_autochunk_search.py
+2
-0
No files found.
tests/test_autochunk/test_autochunk_codegen.py
View file @
aafc3516
...
...
@@ -9,6 +9,7 @@ import colossalai
from colossalai.autochunk.autochunk_codegen import AutoChunkCodeGen
from colossalai.core import global_context as gpc
from colossalai.fx import ColoTracer
from colossalai.fx.codegen.activation_checkpoint_codegen import CODEGEN_AVAILABLE
from colossalai.fx.graph_module import ColoGraphModule
from colossalai.fx.passes.meta_info_prop import MetaInfoProp
from colossalai.fx.profiler import MetaTensor
...
...
@@ -99,6 +100,7 @@ def _test_autochunk_codegen(rank, msa_len, pair_len, max_memory):
    gpc.destroy()


@pytest.mark.skipif(not CODEGEN_AVAILABLE, reason='torch version is lower than 1.12.0')
@pytest.mark.parametrize("max_memory", [None, 20, 25, 30])
@pytest.mark.parametrize("msa_len", [32])
@pytest.mark.parametrize("pair_len", [64])
...
...
tests/test_autochunk/test_autochunk_search.py
View file @
aafc3516
...
...
@@ -8,6 +8,7 @@ import torch.multiprocessing as mp
import colossalai
from colossalai.autochunk.autochunk_codegen import AutoChunkCodeGen
from colossalai.core import global_context as gpc
from colossalai.fx.codegen.activation_checkpoint_codegen import CODEGEN_AVAILABLE
from colossalai.fx.passes.meta_info_prop import MetaInfoProp
from colossalai.fx.profiler import MetaTensor
from colossalai.utils import free_port
...
...
@@ -86,6 +87,7 @@ def _test_autochunk_search(rank, msa_len, pair_len, max_memory):
    gpc.destroy()


@pytest.mark.skipif(not CODEGEN_AVAILABLE, reason="torch version is lower than 1.12.0")
@pytest.mark.parametrize("max_memory", [None, 20, 25, 30])
@pytest.mark.parametrize("msa_len", [32])
@pytest.mark.parametrize("pair_len", [64])
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment