ModelZoo / SOLOv2-pytorch · Commits · 64928acc

Unverified commit, authored May 13, 2019 by Kai Chen; committed via GitHub on May 13, 2019.

Rename normalize to norm_cfg (#637)

* rename normalize to norm_cfg
* update configs
* Update resnet.py

Parent: 960e614c
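The three diffs below show the model-side half of the rename; the "update configs" item in the commit message refers to config files that are not shown on this page. As a hypothetical sketch of what that config-level change looks like (the ResNet-50 depth and FPN channel values here are illustrative assumptions, not values taken from this commit):

# Hypothetical mmdetection-style config fragment; every concrete value here
# (ResNet-50 depth, FPN channels) is an assumption for illustration only.

# Before this commit: normalization was configured through `normalize`.
model_before = dict(
    backbone=dict(
        type='ResNet',
        depth=50,
        normalize=dict(type='BN', requires_grad=True)),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        num_outs=5))

# After this commit: the same dict is passed as `norm_cfg`.
model_after = dict(
    backbone=dict(
        type='ResNet',
        depth=50,
        norm_cfg=dict(type='BN', requires_grad=True)),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        num_outs=5))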
Changes: 23 files (this page shows 3 changed files with 61 additions and 31 deletions):

mmdet/models/necks/fpn.py               +5  -8
mmdet/models/shared_heads/res_layer.py  +3  -3
mmdet/models/utils/conv_module.py       +53 -20
mmdet/models/necks/fpn.py

@@ -18,7 +18,7 @@ class FPN(nn.Module):
                  add_extra_convs=False,
                  extra_convs_on_inputs=True,
                  conv_cfg=None,
-                 normalize=None,
+                 norm_cfg=None,
                  activation=None):
         super(FPN, self).__init__()
         assert isinstance(in_channels, list)
@@ -27,7 +27,6 @@ class FPN(nn.Module):
         self.num_ins = len(in_channels)
         self.num_outs = num_outs
         self.activation = activation
-        self.with_bias = normalize is None
         if end_level == -1:
             self.backbone_end_level = self.num_ins
@@ -51,8 +50,7 @@ class FPN(nn.Module):
                 out_channels,
                 1,
                 conv_cfg=conv_cfg,
-                normalize=normalize,
-                bias=self.with_bias,
+                norm_cfg=norm_cfg,
                 activation=self.activation,
                 inplace=False)
             fpn_conv = ConvModule(
@@ -61,8 +59,7 @@ class FPN(nn.Module):
                 3,
                 padding=1,
                 conv_cfg=conv_cfg,
-                normalize=normalize,
-                bias=self.with_bias,
+                norm_cfg=norm_cfg,
                 activation=self.activation,
                 inplace=False)
@@ -83,8 +80,8 @@ class FPN(nn.Module):
                     3,
                     stride=2,
                     padding=1,
-                    normalize=normalize,
-                    bias=self.with_bias,
+                    conv_cfg=conv_cfg,
+                    norm_cfg=norm_cfg,
                     activation=self.activation,
                     inplace=False)
                 self.fpn_convs.append(extra_fpn_conv)
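For orientation, a minimal usage sketch of the neck after this rename; the import path follows this repository's layout, and the channel numbers are illustrative assumptions rather than values taken from the commit:

import torch
from mmdet.models.necks import FPN  # assumes the package exports FPN, as upstream mmdetection does

# Hypothetical usage after the rename: `norm_cfg` replaces `normalize`, and
# each ConvModule now derives its conv bias from norm_cfg internally, so the
# old `self.with_bias` bookkeeping removed above is no longer needed.
neck = FPN(
    in_channels=[256, 512, 1024, 2048],  # assumed ResNet-50 stage channels
    out_channels=256,
    num_outs=5,
    norm_cfg=dict(type='BN', requires_grad=True))

feats = [torch.rand(1, c, 64 // 2 ** i, 64 // 2 ** i)
         for i, c in enumerate([256, 512, 1024, 2048])]
outs = neck(feats)  # tuple of 5 pyramid levels, each with 256 channels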
mmdet/models/shared_heads/res_layer.py

@@ -17,13 +17,13 @@ class ResLayer(nn.Module):
                  stride=2,
                  dilation=1,
                  style='pytorch',
-                 normalize=dict(type='BN', requires_grad=True),
+                 norm_cfg=dict(type='BN', requires_grad=True),
                  norm_eval=True,
                  with_cp=False,
                  dcn=None):
         super(ResLayer, self).__init__()
         self.norm_eval = norm_eval
-        self.normalize = normalize
+        self.norm_cfg = norm_cfg
         self.stage = stage
         block, stage_blocks = ResNet.arch_settings[depth]
         stage_block = stage_blocks[stage]
@@ -39,7 +39,7 @@ class ResLayer(nn.Module):
             dilation=dilation,
             style=style,
             with_cp=with_cp,
-            normalize=self.normalize,
+            norm_cfg=self.norm_cfg,
             dcn=dcn)
         self.add_module('layer{}'.format(stage + 1), res_layer)
mmdet/models/utils/conv_module.py

@@ -42,6 +42,27 @@ def build_conv_layer(cfg, *args, **kwargs):
 class ConvModule(nn.Module):
+    """Conv-Norm-Activation block.
+
+    Args:
+        in_channels (int): Same as nn.Conv2d.
+        out_channels (int): Same as nn.Conv2d.
+        kernel_size (int or tuple[int]): Same as nn.Conv2d.
+        stride (int or tuple[int]): Same as nn.Conv2d.
+        padding (int or tuple[int]): Same as nn.Conv2d.
+        dilation (int or tuple[int]): Same as nn.Conv2d.
+        groups (int): Same as nn.Conv2d.
+        bias (bool or str): If specified as `auto`, it will be decided by the
+            norm_cfg. Bias will be set as True if norm_cfg is None, otherwise
+            False.
+        conv_cfg (dict): Config dict for convolution layer.
+        norm_cfg (dict): Config dict for normalization layer.
+        activation (str or None): Activation type, "ReLU" by default.
+        inplace (bool): Whether to use inplace mode for activation.
+        activate_last (bool): Whether to apply the activation layer in the
+            last. (Do not use this flag since the behavior and api may be
+            changed in the future.)
+    """

     def __init__(self,
                  in_channels,
@@ -51,35 +72,42 @@ class ConvModule(nn.Module):
                  padding=0,
                  dilation=1,
                  groups=1,
-                 bias=True,
+                 bias='auto',
                  conv_cfg=None,
-                 normalize=None,
+                 norm_cfg=None,
                  activation='relu',
                  inplace=True,
                  activate_last=True):
         super(ConvModule, self).__init__()
         assert conv_cfg is None or isinstance(conv_cfg, dict)
-        assert normalize is None or isinstance(normalize, dict)
-        self.with_norm = normalize is not None
-        self.with_activatation = activation is not None
-        self.with_bias = bias
+        assert norm_cfg is None or isinstance(norm_cfg, dict)
+        self.conv_cfg = conv_cfg
+        self.norm_cfg = norm_cfg
+        self.activation = activation
+        self.inplace = inplace
+        self.activate_last = activate_last
+
+        self.with_norm = norm_cfg is not None
+        self.with_activatation = activation is not None
+        # if the conv layer is before a norm layer, bias is unnecessary.
+        if bias == 'auto':
+            bias = False if self.with_norm else True
+        self.with_bias = bias

         if self.with_norm and self.with_bias:
             warnings.warn('ConvModule has norm and bias at the same time')

-        self.conv = build_conv_layer(
-            conv_cfg,
-            in_channels,
-            out_channels,
-            kernel_size,
-            stride,
-            padding,
-            dilation,
-            groups,
-            bias=bias)
+        # build convolution layer
+        self.conv = build_conv_layer(
+            conv_cfg,
+            in_channels,
+            out_channels,
+            kernel_size,
+            stride=stride,
+            padding=padding,
+            dilation=dilation,
+            groups=groups,
+            bias=bias)
         # export the attributes of self.conv to a higher level for convenience
         self.in_channels = self.conv.in_channels
         self.out_channels = self.conv.out_channels
         self.kernel_size = self.conv.kernel_size
@@ -90,17 +118,21 @@ class ConvModule(nn.Module):
         self.output_padding = self.conv.output_padding
         self.groups = self.conv.groups

         # build normalization layers
         if self.with_norm:
             norm_channels = out_channels if self.activate_last else in_channels
-            self.norm_name, norm = build_norm_layer(normalize, norm_channels)
+            self.norm_name, norm = build_norm_layer(norm_cfg, norm_channels)
             self.add_module(self.norm_name, norm)

         # build activation layer
         if self.with_activatation:
-            assert activation in ['relu'], 'Only ReLU supported.'
+            if self.activation not in ['relu']:
+                raise ValueError('{} is currently not supported.'.format(
+                    self.activation))
             if self.activation == 'relu':
                 self.activate = nn.ReLU(inplace=inplace)

-        # Default using msra init
+        # Use msra init by default
         self.init_weights()

     @property
@@ -121,6 +153,7 @@ class ConvModule(nn.Module):
             if activate and self.with_activatation:
                 x = self.activate(x)
         else:
+            # WARN: this may be removed or modified
             if norm and self.with_norm:
                 x = self.norm(x)
             if activate and self.with_activatation:
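The docstring added above documents the new bias='auto' behavior: the conv bias is dropped whenever a norm_cfg is given, and kept otherwise. As a quick illustration, a minimal sketch, assuming ConvModule is exported from mmdet.models.utils as in upstream mmdetection:

import torch
from mmdet.models.utils import ConvModule  # assumed export path, as in upstream mmdetection

# With a norm_cfg, bias='auto' resolves to False: the following norm layer
# makes a conv bias redundant, so none is created.
conv_bn = ConvModule(3, 16, 3, padding=1, norm_cfg=dict(type='BN'))
assert conv_bn.with_norm and conv_bn.with_bias is False

# Without a norm_cfg, bias='auto' resolves to True, as in a plain nn.Conv2d.
conv_plain = ConvModule(3, 16, 3, padding=1, norm_cfg=None)
assert conv_plain.with_bias is True

x = torch.rand(1, 3, 32, 32)
y = conv_bn(x)  # conv -> BN -> ReLU, output shape (1, 16, 32, 32)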