Unverified Commit a3c0e67c authored by Nicolas Hug's avatar Nicolas Hug Committed by GitHub
Browse files

increase tol (#3320)


Co-authored-by: Vasilis Vryniotis <datumbox@users.noreply.github.com>
parent 9aa07224
...@@ -549,7 +549,7 @@ class DeformConvTester(OpTester, unittest.TestCase): ...@@ -549,7 +549,7 @@ class DeformConvTester(OpTester, unittest.TestCase):
out_channels = 2 out_channels = 2
kernel_size = (3, 2) kernel_size = (3, 2)
groups = 2 groups = 2
tol = 1e-3 if dtype is torch.half else 1e-5 tol = 2e-3 if dtype is torch.half else 1e-5
layer = ops.DeformConv2d(in_channels, out_channels, kernel_size, stride=stride, padding=padding, layer = ops.DeformConv2d(in_channels, out_channels, kernel_size, stride=stride, padding=padding,
dilation=dilation, groups=groups).to(device=x.device, dtype=dtype) dilation=dilation, groups=groups).to(device=x.device, dtype=dtype)
...@@ -651,7 +651,6 @@ class DeformConvTester(OpTester, unittest.TestCase): ...@@ -651,7 +651,6 @@ class DeformConvTester(OpTester, unittest.TestCase):
@unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable") @unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable")
def test_autocast(self): def test_autocast(self):
set_rng_seed(0)
for dtype in (torch.float, torch.half): for dtype in (torch.float, torch.half):
with torch.cuda.amp.autocast(): with torch.cuda.amp.autocast():
self._test_forward(torch.device("cuda"), False, dtype=dtype) self._test_forward(torch.device("cuda"), False, dtype=dtype)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment