Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
SOLOv2-pytorch
Commits
8e098356
Commit
8e098356
authored
Oct 21, 2018
by
Kai Chen
Browse files
remove useless comments
parent
0868596e
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
5 additions
and
15 deletions
+5
-15
mmdet/models/utils/conv_module.py
mmdet/models/utils/conv_module.py
+5
-15
No files found.
mmdet/models/utils/conv_module.py
View file @
8e098356
import
warnings
import
warnings
import
torch.nn
as
nn
import
torch.nn
as
nn
from
mmcv.cnn
import
kaiming_init
,
constant_init
from
.norm
import
build_norm_layer
from
.norm
import
build_norm_layer
...
@@ -51,15 +52,8 @@ class ConvModule(nn.Module):
...
@@ -51,15 +52,8 @@ class ConvModule(nn.Module):
self
.
groups
=
self
.
conv
.
groups
self
.
groups
=
self
.
conv
.
groups
if
self
.
with_norm
:
if
self
.
with_norm
:
# self.norm_type, self.norm_params = parse_norm(normalize)
norm_channels
=
out_channels
if
self
.
activate_last
else
in_channels
# assert self.norm_type in [None, 'BN', 'SyncBN', 'GN', 'SN']
self
.
norm
=
build_norm_layer
(
normalize
,
norm_channels
)
# self.Norm2d = norm_cfg[self.norm_type]
if
self
.
activate_last
:
self
.
norm
=
build_norm_layer
(
normalize
,
out_channels
)
# self.norm = self.Norm2d(out_channels, **self.norm_params)
else
:
self
.
norm
=
build_norm_layer
(
normalize
,
in_channels
)
# self.norm = self.Norm2d(in_channels, **self.norm_params)
if
self
.
with_activatation
:
if
self
.
with_activatation
:
assert
activation
in
[
'relu'
],
'Only ReLU supported.'
assert
activation
in
[
'relu'
],
'Only ReLU supported.'
...
@@ -71,13 +65,9 @@ class ConvModule(nn.Module):
...
@@ -71,13 +65,9 @@ class ConvModule(nn.Module):
def init_weights(self):
    """Initialize the conv and (optional) norm layers of this module.

    The conv layer gets Kaiming initialization; the nonlinearity passed to
    the initializer mirrors the module's configured activation, defaulting
    to ``'relu'`` when no activation was configured. When a norm layer is
    present, its weight is set to 1 and its bias to 0 via
    ``constant_init``.
    """
    # Fall back to 'relu' so kaiming_init always receives a valid
    # nonlinearity even for activation-free modules.
    if self.activation is None:
        nonlinearity = 'relu'
    else:
        nonlinearity = self.activation
    # kaiming_init (mmcv.cnn) initializes both weight and bias of the conv.
    kaiming_init(self.conv, nonlinearity=nonlinearity)
    if self.with_norm:
        # weight=1, bias=0: identity-like start for BN/GN-style layers.
        constant_init(self.norm, 1, bias=0)
def
forward
(
self
,
x
,
activate
=
True
,
norm
=
True
):
def
forward
(
self
,
x
,
activate
=
True
,
norm
=
True
):
if
self
.
activate_last
:
if
self
.
activate_last
:
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment