Commit 5bae299e authored by lcskrishna

skip the unit tests on ROCm

parent 663d5a4d
@@ -103,6 +103,7 @@ class TestMultiTensorAxpby(unittest.TestCase):
         # self.assertTrue(self.overflow_buf.item())
 
     @unittest.skipIf(disabled, "amp_C is unavailable")
+    @skipIfRocm
     def test_fuzz(self):
         input_size_pairs = (
             (7777*77, 555*555),
......
@@ -11,6 +11,8 @@ import torch.nn.functional as F
 from utils import common_init, HALF, FLOAT,\
     ALWAYS_HALF, ALWAYS_FLOAT, MATCH_INPUT
+from apex.testing.common_utils import skipIfRocm
+
 try:
     import amp_C
     from amp_C import multi_tensor_scale
@@ -88,6 +90,7 @@ class TestMultiTensorScale(unittest.TestCase):
         # self.downscale(self.fp32, self.fp16, self.fp16_ref)
 
     @unittest.skipIf(disabled, "amp_C is unavailable")
+    @skipIfRocm
     def test_fuzz(self):
         input_size_pairs = (
             (7777*77, 555*555),
......
@@ -2,7 +2,7 @@ import unittest
 import apex
 import torch
+from apex.testing.common_utils import skipIfRocm
 
 class TestFusedAdagrad(unittest.TestCase):
     def setUp(self, max_abs_diff=1e-6, max_rel_diff=1, iters=7):
@@ -78,6 +78,7 @@ class TestFusedAdagrad(unittest.TestCase):
         if not apex_only:
             self.assertLessEqual(max_rel_diff, self.max_rel_diff)
 
+    @skipIfRocm
     def test_float(self):
         self.gen_single_type_test(param_type=torch.float)
@@ -89,10 +90,12 @@ class TestFusedAdagrad(unittest.TestCase):
     # Uses apex optimizers(controlled by apex_only flag) for both types.
     # Doesn't use upstream optimizer like other tests as they seem to be
     # numerically unstable for half types(see skip note for test above).
+    @skipIfRocm
     def test_bfloat16(self):
         self.max_abs_diff = 1e-2
         self.gen_single_type_test(param_type=torch.bfloat16, apex_only=True)
 
+    @skipIfRocm
     def test_multi_params(self):
         sizes = [[4096, 1024], [4096], [4096, 2048], [32320, 1024], [1]]
         adagrad_option = {"lr": 5e-4, "eps": 1e-08, "weight_decay": 0}
......
@@ -77,6 +77,7 @@ class TestFusedAdam(unittest.TestCase):
         if not apex_only:
             self.assertLessEqual(max_rel_diff, self.max_rel_diff)
 
+    @skipIfRocm
     def test_float(self):
         self.gen_single_type_test(param_type=torch.float)
@@ -87,6 +88,7 @@ class TestFusedAdam(unittest.TestCase):
     # Uses apex optimizers(controlled by apex_only flag) for both types.
     # Doesn't use upstream optimizer like other tests as they seem to be
     # numerically unstable for half types
+    @skipIfRocm
     def test_bfloat16(self):
         self.max_abs_diff = 1e-2
         self.gen_single_type_test(param_type=torch.bfloat16, apex_only=True)
......
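For context, `skipIfRocm` is an ordinary `unittest` skip decorator imported from `apex.testing.common_utils`. Below is a minimal sketch of what that module might provide, assuming it gates on `torch.version.hip`; the real implementation may differ (for example, it may also consult an environment variable):

```python
import unittest

import torch

# Assumption: ROCm builds of PyTorch report a HIP version string here,
# while CUDA builds return None.
TEST_WITH_ROCM = torch.version.hip is not None


def skipIfRocm(fn):
    """Skip the decorated test when running on a ROCm build of PyTorch."""
    return unittest.skipIf(
        TEST_WITH_ROCM, "test doesn't currently work on the ROCm stack"
    )(fn)
```

Because the decorator stacks with existing ones such as `@unittest.skipIf(disabled, "amp_C is unavailable")`, a test is skipped if either condition holds.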