OpenDAS / apex · Commits · 964e61f1

Commit 964e61f1 authored Oct 26, 2021 by hubertlu

Enable MLP unit tests on ROCm

parent d36b3c63
Showing 1 changed file with 2 additions and 7 deletions

tests/L0/run_mlp/test_mlp.py  (+2 -7)
@@ -18,7 +18,6 @@ class TestMLP(unittest.TestCase):
     def test_creation(self):
         MLP(mlp_sizes)
 
-    @skipIfRocm
     def test_numeric(self):
         mlp = MLP(mlp_sizes).cuda()
@@ -53,7 +52,6 @@ class TestMLP(unittest.TestCase):
             ref_mlp[0].bias.grad.detach().cpu().numpy(),
             atol=1e-7, rtol=1e-5)
 
-    @skipIfRocm
     def test_no_bias(self):
         for use_activation in ['none', 'relu', 'sigmoid']:
             mlp = MLP(mlp_sizes, bias=False, activation=use_activation).cuda()
@@ -91,7 +89,6 @@ class TestMLP(unittest.TestCase):
                 ref_mlp[0].weight.grad.detach().cpu().numpy(),
                 atol=1e-7, rtol=100)
 
-    @skipIfRocm
     def test_with_bias(self):
         for use_activation in ['none', 'relu', 'sigmoid']:
             mlp = MLP(mlp_sizes, bias=True, activation=use_activation).cuda()
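The context lines in these hunks are the trailing arguments of elementwise tolerance checks; the call head sits outside the hunks and is assumed here to be np.testing.assert_allclose (note the very loose rtol=100 on the weight-gradient check above). A minimal illustration of how such a check behaves, with made-up values:

# Illustration only: assert_allclose passes iff, elementwise,
# |actual - desired| <= atol + rtol * |desired|.
import numpy as np

np.testing.assert_allclose(
    np.array([1.0 + 5e-6]),   # actual
    np.array([1.0]),          # desired
    atol=1e-7, rtol=1e-5)     # 5e-6 <= 1e-7 + 1e-5 * 1.0 -> passes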
@@ -134,7 +131,6 @@ class TestMLP(unittest.TestCase):
                 ref_mlp[0].bias.grad.detach().cpu().numpy(),
                 atol=1e-7, rtol=1e-5)
 
-    @skipIfRocm
     def test_no_grad(self):
         mlp = MLP(mlp_sizes).cuda()
 
@@ -165,7 +161,6 @@ class TestMLP(unittest.TestCase):
             ref_mlp[0].weight.grad.detach().cpu().numpy(),
             atol=1e-7, rtol=1e-5)
 
-    @skipIfRocm
     def test_performance_half(self):
         mlp = MLP(mlp_sizes).cuda().half()
 
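All five deletions drop the same @skipIfRocm decorator, so these tests are no longer skipped on ROCm. The decorator itself is defined outside this file; a minimal sketch, assuming the usual pattern of detecting a HIP build of PyTorch (the torch.version.hip check and skip message below are assumptions, not taken from this diff):

# Sketch of a skipIfRocm decorator as commonly defined in shared test
# utilities; the HIP-build detection here is an assumption.
import unittest
import torch

TEST_WITH_ROCM = torch.version.hip is not None  # True on ROCm/HIP builds

def skipIfRocm(fn):
    """Skip the decorated test when running on a ROCm build of PyTorch."""
    return unittest.skipIf(TEST_WITH_ROCM, "test skipped on ROCm")(fn)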
@@ -195,7 +190,7 @@ class TestMLP(unittest.TestCase):
             mlp.zero_grad()
             test_loss.backward()
 
-        torch.cuda.profiler.start()
+        # torch.cuda.profiler.start()
         torch.cuda.synchronize()
         start_time = time()
         for _ in range(num_iters):
@@ -217,7 +212,7 @@ class TestMLP(unittest.TestCase):
         torch.cuda.synchronize()
         stop_time = time()
         print(F"C++ MLP time {(stop_time - start_time) * 1000. / num_iters:.4f} ms")
-        torch.cuda.profiler.stop()
+        # torch.cuda.profiler.stop()
 
 if __name__ == '__main__':
     unittest.main()
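The two additions comment out the torch.cuda.profiler.start()/stop() hooks rather than delete them; these hooks are tied to NVIDIA's profiling tools and so are presumably inert or unsupported on ROCm, while the timing itself still relies on torch.cuda.synchronize() around the time() readings. A hypothetical alternative, not what this commit does, would be to guard the hooks by build type:

# Hypothetical alternative, not part of this commit: invoke the CUDA
# profiler hooks only on non-HIP (CUDA) builds instead of commenting
# them out.
import torch

on_rocm = torch.version.hip is not None  # assumption: HIP-build detection

if not on_rocm:
    torch.cuda.profiler.start()
# ... timed region: synchronize, time(), run num_iters iterations ...
if not on_rocm:
    torch.cuda.profiler.stop()

Either way, the file remains directly runnable through its unittest entry point, e.g. python tests/L0/run_mlp/test_mlp.py.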