Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
TransformerEngine
Commits
e17c31c3
Unverified
Commit
e17c31c3
authored
Jun 13, 2023
by
Kirthi Shankar Sivamani
Committed by
GitHub
Jun 13, 2023
Browse files
Update FA version (#279)
Signed-off-by: Kirthi Shankar Sivamani <ksivamani@nvidia.com>
parent
c67bb2fc
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
2 additions
and
2 deletions
+2
-2
setup.py
setup.py
+1
-1
transformer_engine/pytorch/attention.py
transformer_engine/pytorch/attention.py
+1
-1
No files found.
setup.py
View file @
e17c31c3
...
@@ -290,7 +290,7 @@ def setup_requirements() -> Tuple[List[str], List[str], List[str]]:
     # Framework-specific requirements
     if "pytorch" in frameworks():
-        add_unique(install_reqs, ["torch", "flash-attn==1.0.6"])
+        add_unique(install_reqs, ["torch", "flash-attn>=1.0.6, <=1.0.7"])
         add_unique(test_reqs, ["numpy", "onnxruntime", "torchvision"])
     if "jax" in frameworks():
         if not found_pybind11():
...
...
transformer_engine/pytorch/attention.py
View file @
e17c31c3
...
@@ -35,7 +35,7 @@ from transformer_engine.pytorch.distributed import (
 from transformer_engine.pytorch.export import is_in_onnx_export_mode

 _flash_attn_version = packaging.version.Version(version("flash-attn"))
-_flash_attn_version_required = packaging.version.Version("1.0.2")
+_flash_attn_version_required = packaging.version.Version("1.0.6")

 __all__ = ["DotProductAttention"]
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment