Unverified Commit 1398fa5f authored by Paweł Gadziński, committed by GitHub

[PyTorch Debug] Skip log test on device if it does not support fp8. (#2109)



Fix test on old device.
Signed-off-by: Pawel Gadzinski <pgadzinski@nvidia.com>
parent d3706087
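
For context, the fp8_available flag and reason_for_no_fp8 message used in the diff below are typically defined once at module scope in TransformerEngine's test files. A minimal sketch, assuming the FP8GlobalStateManager.is_fp8_available helper from transformer_engine.pytorch.fp8 (the definition is not shown in this diff):

import pytest
from transformer_engine.pytorch.fp8 import FP8GlobalStateManager

# Returns a (bool, str) pair: whether the current GPU supports FP8
# execution, and a human-readable reason if it does not.
fp8_available, reason_for_no_fp8 = FP8GlobalStateManager.is_fp8_available()
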
@@ -119,6 +119,9 @@ def read_log(log_dir: str) -> str:
 def test_sanity(feature_dirs):
+    if not fp8_available:
+        pytest.skip(reason_for_no_fp8)
+
     log_all_stats_config = LOG_QUANTIZED_CONFIG_BASE.format(stats=", ".join(all_stats))
     with debug_session(log_all_stats_config, feature_dirs) as log_dir:
         model = te.Linear(128, 128, params_dtype=torch.bfloat16)
@@ -207,6 +210,9 @@ def test_numerics(fp8_recipe, feature_dirs):
 @pytest.mark.parametrize("layer", ["linear", "transformer"])
 def test_log_every_3_or_5_layers(layer, configs_dir, feature_dirs):
+    if not fp8_available:
+        pytest.skip(reason_for_no_fp8)
+
     # If a layer does not invoke any feature in the current iteration,
     # it switches into non-debug mode.
     # This test checks whether this works correctly -
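
An equivalent guard (a sketch, not part of this commit) could use pytest's skipif marker instead, so the skip is decided at collection time rather than inside the test body; names reused from the module-level sketch above:

import pytest

# Reusable marker built from the module-level flags (hypothetical name).
requires_fp8 = pytest.mark.skipif(not fp8_available, reason=reason_for_no_fp8)

@requires_fp8
@pytest.mark.parametrize("layer", ["linear", "transformer"])
def test_log_every_3_or_5_layers(layer, configs_dir, feature_dirs):
    ...

The in-body pytest.skip used in the commit keeps the guard next to the code it protects and matches the style of the surrounding tests.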