MMCV commit 841a078e (unverified)
Authored Apr 24, 2021 by Rui Xu; committed by GitHub on Apr 24, 2021
Parent: 9649a9ad

[Fix]: fix data type in fused-bias-leakyrelu for apex fp16 training (#981)
Showing 1 changed file with 5 additions and 5 deletions.

mmcv/ops/fused_bias_leakyrelu.py (+5, -5)
@@ -45,10 +45,9 @@ class FusedBiasLeakyReLUFunctionBackward(Function):
         # The second order deviation, in fact, contains two parts, while the
         # the first part is zero. Thus, we direct consider the second part
         # which is similar with the first order deviation in implementation.
-        gradgrad_out = ext_module.fused_bias_leakyrelu(gradgrad_input,
-                                                       gradgrad_bias, out, 3,
-                                                       1, ctx.negative_slope,
-                                                       ctx.scale)
+        gradgrad_out = ext_module.fused_bias_leakyrelu(
+            gradgrad_input, gradgrad_bias.to(out.dtype), out, 3, 1,
+            ctx.negative_slope, ctx.scale)
         return gradgrad_out, None, None, None
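Why this hunk matters: with apex fp16 training, the activation tensor out saved from the forward pass is typically float16, while the incoming gradgrad_bias gradient can still be float32, and the fused CUDA kernel expects all tensor arguments in one dtype. A minimal sketch of the mismatch and the cast (CPU tensors for illustration only; in real training these live on the GPU):

    import torch

    # Under apex fp16 training, activations such as `out` are float16 while
    # gradients tied to fp32 master parameters can arrive as float32.
    out = torch.randn(4, 8, dtype=torch.float16)
    gradgrad_bias = torch.randn(8, dtype=torch.float32)

    # The fix normalizes the dtype at the call site before invoking the
    # fused CUDA op, mirroring gradgrad_bias.to(out.dtype) in the diff.
    gradgrad_bias = gradgrad_bias.to(out.dtype)
    assert gradgrad_bias.dtype == out.dtype == torch.float16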
@@ -139,7 +138,8 @@ def fused_bias_leakyrelu(input, bias, negative_slope=0.2, scale=2**0.5):
     if not input.is_cuda:
         return bias_leakyrelu_ref(input, bias, negative_slope, scale)
-    return FusedBiasLeakyReLUFunction.apply(input, bias, negative_slope, scale)
+    return FusedBiasLeakyReLUFunction.apply(input, bias.to(input.dtype),
+                                            negative_slope, scale)


 def bias_leakyrelu_ref(x, bias, negative_slope=0.2, scale=2**0.5):
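The forward wrapper gets the same treatment: apex's O1 mode casts activations to float16 but leaves nn.Parameter weights such as the bias in float32, so the wrapper now casts the bias to the input's dtype before dispatching to the autograd Function. A rough usage sketch under that assumption (the module, its name, and the shapes are illustrative, not part of the commit):

    import torch
    import torch.nn as nn
    from mmcv.ops import fused_bias_leakyrelu

    class IllustrativeFusedLeakyReLU(nn.Module):
        """Hypothetical wrapper: under apex O1 the bias Parameter stays
        float32 while the incoming activation is float16."""

        def __init__(self, channels):
            super().__init__()
            self.bias = nn.Parameter(torch.zeros(channels))

        def forward(self, x):
            # After this commit, fused_bias_leakyrelu internally applies
            # bias.to(input.dtype), so a float16 x paired with a float32
            # bias no longer trips up the fused CUDA kernel.
            return fused_bias_leakyrelu(x, self.bias)

On non-CUDA inputs the wrapper falls back to bias_leakyrelu_ref, as the second hunk shows, so the dtype cast only needs to happen on the CUDA path.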