Unverified commit 02096f61, authored by Tim Moon, committed by GitHub
Browse files

[PyTorch] Check if FP8 block scaling is supported in tests (#1727)



* Check if FP8 block scaling is supported in tests
Signed-off-by: Tim Moon <tmoon@nvidia.com>

* Update tests/pytorch/distributed/test_cast_master_weights_to_fp8.py
Signed-off-by: Tim Moon <4406448+timmoon10@users.noreply.github.com>

---------
Signed-off-by: Tim Moon <tmoon@nvidia.com>
Signed-off-by: Tim Moon <4406448+timmoon10@users.noreply.github.com>
parent ef7dee4b
@@ -15,6 +15,9 @@ if torch.cuda.device_count() < 2:
     pytest.skip("cast_master_weights_to_fp8 test needs at least 2 GPUs.")
 fp8_available, reason_for_no_fp8 = FP8GlobalStateManager.is_fp8_available()
+fp8_block_scaling_available, reason_for_no_fp8_block_scaling = (
+    FP8GlobalStateManager.is_fp8_block_scaling_available()
+)
 TEST_ROOT = Path(__file__).parent.resolve()
 NUM_PROCS: int = min(2, torch.cuda.device_count())
@@ -30,6 +33,8 @@ def _run_test(quantization):
 @pytest.mark.parametrize("quantization", ["fp8", "fp8_cs", "fp8_block"])
 def test_cast_master_weights_to_fp8(quantization):
-    if not fp8_available:
+    if quantization in ("fp8", "fp8_cs") and not fp8_available:
         pytest.skip(reason_for_no_fp8)
+    if quantization == "fp8_block" and not fp8_block_scaling_available:
+        pytest.skip(reason_for_no_fp8_block_scaling)
     _run_test(quantization)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment