Commit 2fc4b10c authored by zhaochao's avatar zhaochao
Browse files

[DCU] Skip some tests in test_cuda_graphs.py under L0


Signed-off-by: zhaochao <zhaochao1@sugon.com>
parent 6af7b77d
@@ -28,9 +28,9 @@ if IS_HIP_EXTENSION:
     from functools import cache
 # Check if FP8 is supported.
-fp8_available, _ = FP8GlobalStateManager.is_fp8_available()
-fp8_block_scaling_available, _ = FP8GlobalStateManager.is_fp8_block_scaling_available()
-mxfp8_available, _ = FP8GlobalStateManager.is_mxfp8_available()
+fp8_available, reason_for_no_fp8 = FP8GlobalStateManager.is_fp8_available()
+fp8_block_scaling_available, reason_for_no_fp8_block_scaling = FP8GlobalStateManager.is_fp8_block_scaling_available()
+mxfp8_available, reason_for_no_mxfp8 = FP8GlobalStateManager.is_mxfp8_available()
 # Reset RNG states.
 reset_rng_states()
@@ -310,6 +310,12 @@ def test_make_graphed_callables(
         pytest.skip("FP8 needed for FP8 parameters.")
     if fp8 and fp8_recipe.float8_block_scaling() and module == "linear_op":
         pytest.skip("Module not yet supported for float8_block_scaling with CUDA graphs")
+    if fp8 and not fp8_available:
+        pytest.skip(reason_for_no_fp8)
+    if fp8 and fp8_recipe.float8_block_scaling() and not fp8_block_scaling_available:
+        pytest.skip(reason_for_no_fp8_block_scaling)
+    if fp8 and fp8_recipe.mxfp8() and not mxfp8_available:
+        pytest.skip(reason_for_no_mxfp8)
     # Run model with different CUDA graph settings.
     model_config = model_configs[model_config]
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment