OpenDAS / TransformerEngine

Commit cdbbc475, authored Oct 20, 2025 by zhaochao

[DCU] Skip some tests in test_cuda_graphs.py under L0

Signed-off-by: zhaochao <zhaochao1@sugon.com>

Parent: 2148040f
Showing 1 changed file with 11 additions and 4 deletions:

tests/pytorch/test_cuda_graphs.py (+11, -4)
tests/pytorch/test_cuda_graphs.py

@@ -28,9 +28,9 @@ if IS_HIP_EXTENSION:
     from functools import cache
 
 # Check if FP8 is supported.
-fp8_available, _ = FP8GlobalStateManager.is_fp8_available()
-fp8_block_scaling_available, _ = FP8GlobalStateManager.is_fp8_block_scaling_available()
-mxfp8_available, _ = FP8GlobalStateManager.is_mxfp8_available()
+fp8_available, reason_for_no_fp8 = FP8GlobalStateManager.is_fp8_available()
+fp8_block_scaling_available, reason_for_no_fp8_block_scaling = FP8GlobalStateManager.is_fp8_block_scaling_available()
+mxfp8_available, reason_for_no_mxfp8 = FP8GlobalStateManager.is_mxfp8_available()
 
 # Reset RNG states.
 reset_rng_states()

@@ -310,7 +310,14 @@ def test_make_graphed_callables(
         pytest.skip("FP8 needed for FP8 parameters.")
     if fp8 and fp8_recipe.float8_block_scaling() and module == "linear_op":
         pytest.skip("Module not yet supported for float8_block_scaling with CUDA graphs")
+    if fp8 and not fp8_available:
+        pytest.skip(reason_for_no_fp8)
+    if fp8_params and not fp8:
+        pytest.skip("FP8 needed for FP8 parameters.")
+    if fp8 and fp8_recipe.float8_block_scaling() and not fp8_block_scaling_available:
+        pytest.skip(reason_for_no_fp8_block_scaling)
+    if fp8 and fp8_recipe.mxfp8() and not mxfp8_available:
+        pytest.skip(reason_for_no_mxfp8)
 
     # Run model with different CUDA graph settings.
     model_config = model_configs[model_config]
     kwargs = dict(
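Note on the pattern used above: each FP8GlobalStateManager capability helper touched by this commit (is_fp8_available, is_fp8_block_scaling_available, is_mxfp8_available) returns an (available, reason) pair, and the change stops discarding the reason so it can be forwarded to pytest.skip() in the new guards. Below is a minimal, self-contained sketch of that skip-guard pattern; fake_capability_check is a hypothetical stand-in for illustration only, not part of TransformerEngine.

import pytest

def fake_capability_check():
    # Hypothetical stand-in for FP8GlobalStateManager.is_fp8_available():
    # returns (available, reason) so a skipped test can explain itself.
    return False, "FP8 is not supported on this device"

fp8_available, reason_for_no_fp8 = fake_capability_check()

@pytest.mark.parametrize("fp8", [False, True])
def test_example(fp8):
    # Same guard shape as the lines added in test_make_graphed_callables:
    # skip with the recorded reason instead of a generic message.
    if fp8 and not fp8_available:
        pytest.skip(reason_for_no_fp8)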