@@ -95,6 +95,7 @@ class TestFusedAdam(TestFusedOptimizer):
         self.ref_optim = torch.optim.Adam
         self.fused_optim = apex.optimizers.FusedAdam
@unittest.skip("Skipped the test since a regression introduced from PyTorch upstream: due to https://github.com/pytorch/pytorch/issues/80809#issuecomment-1175211598. Please also refer to https://github.com/ROCmSoftwarePlatform/apex/issues/82")
deftest_float(self):
self.gen_single_type_test(param_type=torch.float)
...
...
@@ -107,6 +108,7 @@ class TestFusedAdam(TestFusedOptimizer):
@unittest.skip("Skipped the test since a regression introduced from PyTorch upstream: due to https://github.com/pytorch/pytorch/issues/80809#issuecomment-1175211598. Please also refer to https://github.com/ROCmSoftwarePlatform/apex/issues/82")
@unittest.skipIf(torch.cuda.device_count()<2,"more than 1 GPU required")
deftest_multi_device(self):
devices=("cuda:0","cuda:1")
...
...
@@ -173,6 +175,7 @@ class TestFusedAdam(TestFusedOptimizer):
@unittest.skip("Skipped the test since a regression introduced from PyTorch upstream: due to https://github.com/pytorch/pytorch/issues/80809#issuecomment-1175211598. Please also refer to https://github.com/ROCmSoftwarePlatform/apex/issues/82")
@unittest.skip("Skipped the test since it failed the accuracy test on the PyTorch as of 8/1/2022. Please refer to https://github.com/ROCmSoftwarePlatform/apex/issues/83")
@unittest.skipIf(torch.cuda.device_count()<2,"more than 1 GPU required")
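The decorators added above come from Python's standard unittest module, with the device-count guard taken from torch.cuda. Below is a minimal, self-contained sketch of the same skip pattern, assuming only that PyTorch is installed; the class name ToySkipExample and the test bodies are illustrative placeholders, not part of the apex test suite.

```python
import unittest

import torch


class ToySkipExample(unittest.TestCase):
    @unittest.skip("unconditionally skipped; the reason string is shown in the test report")
    def test_always_skipped(self):
        self.fail("never executed; the runner reports this test as skipped")

    @unittest.skipIf(torch.cuda.device_count() < 2, "more than 1 GPU required")
    def test_needs_two_gpus(self):
        # Only reached on machines with at least two visible CUDA devices.
        devices = ("cuda:0", "cuda:1")
        tensors = [torch.ones(4, device=d) for d in devices]
        self.assertEqual(len(tensors), 2)


if __name__ == "__main__":
    unittest.main()
```

When skip decorators are stacked, as in the last hunk, the test is skipped whenever any of them applies, so the unconditional skip takes effect regardless of the GPU count.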