Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
MMCV
Commits
d8ac46df
Commit
d8ac46df
authored
Sep 29, 2022
by
takuoko
Committed by
Zaida Zhou
Oct 22, 2022
Browse files
[Enhancement] Support SiLU with torch < 1.7.0
parent
dfef1529
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
32 additions
and
9 deletions
+32
-9
mmcv/cnn/bricks/activation.py
mmcv/cnn/bricks/activation.py
+17
-1
tests/test_cnn/test_silu.py
tests/test_cnn/test_silu.py
+15
-8
No files found.
mmcv/cnn/bricks/activation.py
View file @
d8ac46df
...
...
@@ -15,7 +15,23 @@ for module in [
MODELS
.
register_module
(
module
=
module
)
# Register a 'SiLU' activation so ``build_activation_layer(dict(type='SiLU'))``
# works on every supported torch version.  ``nn.SiLU`` only exists from
# torch 1.7.0 onward, so older versions get a local drop-in implementation.
if digit_version(torch.__version__) >= digit_version('1.7.0'):
    MODELS.register_module(module=nn.SiLU, name='SiLU')
else:

    class SiLU(nn.Module):
        """Sigmoid Weighted Linear Unit (SiLU) activation.

        Computes ``inputs * sigmoid(inputs)``.  Backport of
        ``torch.nn.SiLU`` for torch versions earlier than 1.7.0.

        Args:
            inplace (bool): Whether to do the operation in-place.
                Default: False.
        """

        def __init__(self, inplace=False):
            super().__init__()
            self.inplace = inplace

        def forward(self, inputs) -> torch.Tensor:
            """Apply SiLU element-wise.

            Args:
                inputs (torch.Tensor): Input tensor of any shape.

            Returns:
                torch.Tensor: ``inputs * sigmoid(inputs)``; the same tensor
                object (mutated) when ``self.inplace`` is True.
            """
            if self.inplace:
                # sigmoid is evaluated before mul_ mutates ``inputs``,
                # matching the semantics of ``nn.SiLU(inplace=True)``.
                return inputs.mul_(torch.sigmoid(inputs))
            else:
                return inputs * torch.sigmoid(inputs)

    MODELS.register_module(module=SiLU, name='SiLU')
@
MODELS
.
register_module
(
name
=
'Clip'
)
...
...
tests/test_cnn/test_silu.py
View file @
d8ac46df
# Copyright (c) OpenMMLab. All rights reserved.
import
pytest
import
torch
import
torch.nn.functional
as
F
from
mmcv.cnn.bricks
import
build_activation_layer
from
mmcv.utils
import
digit_version
def test_silu():
    """Check the registered 'SiLU' activation in both out-of-place and
    in-place modes against the reference ``x * sigmoid(x)``.

    The former ``pytest.mark.skipif`` guard for torch < 1.7.0 is dropped:
    a fallback SiLU is registered on old torch versions, so the test is
    valid everywhere.  ``torch.allclose`` replaces ``torch.equal`` because
    exact float equality is fragile, and the reference value is computed
    directly instead of via ``F.silu`` (unavailable before 1.7.0).
    """
    # Out-of-place: output is a new tensor equal to x * sigmoid(x).
    act = build_activation_layer(dict(type='SiLU'))
    x = torch.randn(1, 3, 64, 64)
    expected_output = x * torch.sigmoid(x)
    output = act(x)
    # test output shape
    assert output.shape == expected_output.shape
    # test output value
    assert torch.allclose(output, expected_output)

    # test inplace
    act = build_activation_layer(dict(type='SiLU', inplace=True))
    assert act.inplace
    x = torch.randn(1, 3, 64, 64)
    expected_output = x * torch.sigmoid(x)
    output = act(x)
    # test output shape
    assert output.shape == expected_output.shape
    # test output value
    assert torch.allclose(output, expected_output)
    # in-place mode must mutate and return the input tensor itself
    assert torch.allclose(x, expected_output)
    assert x is output
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment