OpenDAS / apex / Commits

Commit 76e4e054 (unverified)
Authored Dec 31, 2020 by Chaitanya Sri Krishna Lolla; committed by GitHub on Dec 31, 2020
Parents: 663d5a4d, 41bbf93c

Merge pull request #41 from lcskrishna/cl/skip-tests

    Skip the unit tests
Showing 4 changed files with 12 additions and 1 deletion (+12, -1):

    tests/L0/run_amp/test_multi_tensor_axpby.py    +1  -0
    tests/L0/run_amp/test_multi_tensor_scale.py    +3  -0
    tests/L0/run_optimizers/test_adagrad.py        +4  -1
    tests/L0/run_optimizers/test_adam.py           +4  -0
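Each of the four test files gains the skipIfRocm decorator imported from apex.testing.common_utils, so the affected unit tests are skipped on a ROCm build. The helper itself is not part of this diff; the following is only a minimal sketch of what such a decorator might look like, assuming ROCm is detected via torch.version.hip (the flag name and skip message below are illustrative, not the actual apex implementation).

import unittest
import torch

# Assumption: a HIP/ROCm build of PyTorch reports a non-None torch.version.hip.
TEST_WITH_ROCM = torch.version.hip is not None

def skipIfRocm(fn):
    # Skip the wrapped test case on ROCm; a no-op on other builds.
    return unittest.skipIf(TEST_WITH_ROCM, "test skipped on ROCm")(fn)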
tests/L0/run_amp/test_multi_tensor_axpby.py (view file @ 76e4e054)
@@ -103,6 +103,7 @@ class TestMultiTensorAxpby(unittest.TestCase):
        # self.assertTrue(self.overflow_buf.item())

    @unittest.skipIf(disabled, "amp_C is unavailable")
    @skipIfRocm
    def test_fuzz(self):
        input_size_pairs = ((7777*77, 555*555),
tests/L0/run_amp/test_multi_tensor_scale.py (view file @ 76e4e054)
@@ -11,6 +11,8 @@ import torch.nn.functional as F
from utils import common_init, HALF, FLOAT, \
    ALWAYS_HALF, ALWAYS_FLOAT, MATCH_INPUT
from apex.testing.common_utils import skipIfRocm

try:
    import amp_C
    from amp_C import multi_tensor_scale

@@ -88,6 +90,7 @@ class TestMultiTensorScale(unittest.TestCase):
        # self.downscale(self.fp32, self.fp16, self.fp16_ref)

    @unittest.skipIf(disabled, "amp_C is unavailable")
    @skipIfRocm
    def test_fuzz(self):
        input_size_pairs = ((7777*77, 555*555),
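The @unittest.skipIf(disabled, "amp_C is unavailable") guard visible above relies on a module-level flag set by the (collapsed) import block at the top of these amp tests. A sketch of that common guard pattern follows; the wording of the except branch is an assumption, not taken from this diff.

try:
    import amp_C
    from amp_C import multi_tensor_scale
    disabled = False
except ImportError:
    # Assumed behavior: the fused amp_C extension was not built, so the
    # multi-tensor tests in this file are skipped via @unittest.skipIf.
    disabled = True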
tests/L0/run_optimizers/test_adagrad.py (view file @ 76e4e054)
@@ -2,7 +2,7 @@ import unittest
import apex
import torch
from apex.testing.common_utils import skipIfRocm

class TestFusedAdagrad(unittest.TestCase):
    def setUp(self, max_abs_diff=1e-6, max_rel_diff=1, iters=7):

@@ -78,6 +78,7 @@ class TestFusedAdagrad(unittest.TestCase):
        if not apex_only:
            self.assertLessEqual(max_rel_diff, self.max_rel_diff)

    @skipIfRocm
    def test_float(self):
        self.gen_single_type_test(param_type=torch.float)

@@ -89,10 +90,12 @@ class TestFusedAdagrad(unittest.TestCase):
    # Uses apex optimizers(controlled by apex_only flag) for both types.
    # Doesn't use upstream optimizer like other tests as they seem to be
    # numerically unstable for half types(see skip note for test above).
    @skipIfRocm
    def test_bfloat16(self):
        self.max_abs_diff = 1e-2
        self.gen_single_type_test(param_type=torch.bfloat16, apex_only=True)

    @skipIfRocm
    def test_multi_params(self):
        sizes = [[4096, 1024], [4096], [4096, 2048], [32320, 1024], [1]]
        adagrad_option = {"lr": 5e-4, "eps": 1e-08, "weight_decay": 0}
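test_multi_params above builds parameters of the listed sizes and steps apex's fused Adagrad alongside a reference optimizer using the shown options and the tolerances from setUp. As a rough, self-contained illustration of that comparison (not the actual test body; the CUDA device, the single 4096x1024 tensor, and the comparison against torch.optim.Adagrad are assumptions):

import torch
import apex

adagrad_option = {"lr": 5e-4, "eps": 1e-08, "weight_decay": 0}

torch.manual_seed(0)
ref_param = torch.rand(4096, 1024, device="cuda", requires_grad=True)
tst_param = ref_param.detach().clone().requires_grad_(True)

ref_optim = torch.optim.Adagrad([ref_param], **adagrad_option)
tst_optim = apex.optimizers.FusedAdagrad([tst_param], **adagrad_option)

for _ in range(7):  # iters=7, as in setUp
    grad = torch.rand_like(ref_param)
    ref_param.grad = grad.clone()
    tst_param.grad = grad.clone()
    ref_optim.step()
    tst_optim.step()

# The real test asserts this stays within max_abs_diff (1e-6 in setUp).
print((ref_param - tst_param).abs().max().item())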
tests/L0/run_optimizers/test_adam.py (view file @ 76e4e054)
@@ -5,6 +5,8 @@ import random
import torch
import apex
from apex.testing.common_utils import skipIfRocm

class TestFusedAdam(unittest.TestCase):
    def setUp(self, max_abs_diff=1e-3, max_rel_diff=1, iters=7):
        self.max_abs_diff = max_abs_diff

@@ -77,6 +79,7 @@ class TestFusedAdam(unittest.TestCase):
        if not apex_only:
            self.assertLessEqual(max_rel_diff, self.max_rel_diff)

    @skipIfRocm
    def test_float(self):
        self.gen_single_type_test(param_type=torch.float)

@@ -87,6 +90,7 @@ class TestFusedAdam(unittest.TestCase):
    # Uses apex optimizers(controlled by apex_only flag) for both types.
    # Doesn't use upstream optimizer like other tests as they seem to be
    # numerically unstable for half types
    @skipIfRocm
    def test_bfloat16(self):
        self.max_abs_diff = 1e-2
        self.gen_single_type_test(param_type=torch.bfloat16, apex_only=True)
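TestFusedAdam exercises apex.optimizers.FusedAdam, apex's fused drop-in counterpart to torch.optim.Adam. A minimal usage sketch follows, assuming apex was built with its CUDA optimizer extensions; the model, learning rate, and batch shape are illustrative and not taken from the test.

import torch
import apex

model = torch.nn.Linear(4096, 1024).cuda()
optimizer = apex.optimizers.FusedAdam(model.parameters(), lr=5e-4)

for _ in range(7):  # iters=7, matching setUp above
    inp = torch.randn(8, 4096, device="cuda")
    loss = model(inp).float().pow(2).mean()
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()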