OpenDAS / ColossalAI · Commits

Commit efe6fe3a ("code style"), authored Dec 29, 2022 by oahzxl. Parent: 7a23deb5.

Showing 1 changed file, autochunk_benchmark.py, with 17 additions and 17 deletions (+17 / -17). The change is purely organizational: benchmark_evoformer() is moved from above _build_autochunk() to below it, so the helper functions are defined before the entry point that calls them.
autochunk_benchmark.py

@@ -33,23 +33,6 @@ def _benchmark_evoformer(model: torch.nn.Module, node, pair, title):
     )
 
 
-def benchmark_evoformer():
-    # init data and model
-    msa_len = 300
-    pair_len = 800
-    node = torch.randn(1, msa_len, pair_len, 256).cuda()
-    pair = torch.randn(1, pair_len, pair_len, 128).cuda()
-    model = evoformer_base().cuda()
-
-    # build autochunk model
-    max_memory = 3000  # MB
-    autochunk = _build_autochunk(model, max_memory, node, pair)
-
-    # benchmark
-    _benchmark_evoformer(model, node, pair, "openfold")
-    _benchmark_evoformer(autochunk, node, pair, "autochunk")
-
-
 def _build_autochunk(model, max_memory, node, pair):
     # trace the module and replace codegen
     graph = ColoTracer().trace(
@@ -81,5 +64,22 @@ def _build_autochunk(model, max_memory, node, pair):
     return gm
 
 
+def benchmark_evoformer():
+    # init data and model
+    msa_len = 300
+    pair_len = 800
+    node = torch.randn(1, msa_len, pair_len, 256).cuda()
+    pair = torch.randn(1, pair_len, pair_len, 128).cuda()
+    model = evoformer_base().cuda()
+
+    # build autochunk model
+    max_memory = 3000  # MB
+    autochunk = _build_autochunk(model, max_memory, node, pair)
+
+    # benchmark
+    _benchmark_evoformer(model, node, pair, "openfold")
+    _benchmark_evoformer(autochunk, node, pair, "autochunk")
+
+
 if __name__ == "__main__":
     benchmark_evoformer()
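The body of _benchmark_evoformer is collapsed in this diff; only its signature, _benchmark_evoformer(model: torch.nn.Module, node, pair, title), is visible in the first hunk header. For reference, a minimal sketch of what such a helper typically looks like, assuming standard torch.cuda event timing and peak-memory reporting — the body below is illustrative, not the repository's actual implementation:

import torch

def _benchmark_evoformer(model: torch.nn.Module, node, pair, title):
    # Illustrative body (assumption): the real implementation is collapsed
    # in the diff above; only the signature is visible.
    torch.cuda.reset_peak_memory_stats()
    start = torch.cuda.Event(enable_timing=True)
    end = torch.cuda.Event(enable_timing=True)

    with torch.no_grad():
        model(node, pair)  # warm-up pass to exclude one-off setup costs
        torch.cuda.synchronize()
        start.record()
        model(node, pair)
        end.record()
    torch.cuda.synchronize()

    elapsed_ms = start.elapsed_time(end)
    peak_mb = torch.cuda.max_memory_allocated() / 1024**2
    print(f"{title}: {elapsed_ms:.1f} ms, peak memory {peak_mb:.0f} MB")

A warm-up pass before timing is the usual way to keep one-off CUDA kernel compilation and allocator growth out of the measured interval.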
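_build_autochunk is likewise mostly collapsed: the diff context confirms only that it begins by tracing the model with ColoTracer().trace( and ends with return gm. A hedged reconstruction of the flow, assuming the ColossalAI autochunk API of that period — the import paths, ColoTracer's meta_args keyword, and the AutoChunkCodeGen constructor are assumptions drawn from the surrounding codebase, and the exact arguments may differ:

import torch
import torch.fx

from colossalai.fx import ColoTracer  # import path is an assumption
from colossalai.autochunk.autochunk_codegen import AutoChunkCodeGen  # assumption

def _build_autochunk(model, max_memory, node, pair):
    # Trace on meta tensors so tracing allocates no real GPU memory.
    # Only `graph = ColoTracer().trace(` and `return gm` are visible in
    # the diff; the steps in between are a hedged sketch.
    graph = ColoTracer().trace(
        model,
        meta_args={
            "node": node.to(torch.device("meta")),
            "pair": pair.to(torch.device("meta")),
        },
    )

    # Swap in autochunk's codegen, which rewrites the generated forward
    # into chunked loops sized to the max_memory budget (in MB).
    codegen = AutoChunkCodeGen(torch.fx.GraphModule(model, graph), max_memory=max_memory)
    graph.set_codegen(codegen)

    # Rebuild and recompile so the new codegen takes effect in gm.forward.
    gm = torch.fx.GraphModule(model, graph)
    gm.recompile()
    return gm

In the benchmark above, the returned gm is what runs under the "autochunk" title, so any speed or memory difference against the eager "openfold" model comes entirely from the rewritten forward.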