ModelZoo / SOLOv2-pytorch · Commits · 574a920a
"git@developer.sourcefind.cn:zhaoyu6/sglang.git" did not exist on "f8f9244a61544fd90800d94859ed5609798e100d"
Commit 574a920a, authored Jan 03, 2019 by ThangVu
revise group norm (4)
parent 3fdd041c
Showing 3 changed files with 72 additions and 35 deletions:

mmdet/models/backbones/resnet.py    +48 / -24
mmdet/models/utils/conv_module.py   +8 / -4
mmdet/models/utils/norm.py          +16 / -7
mmdet/models/backbones/resnet.py
@@ -35,9 +35,8 @@ class BasicBlock(nn.Module):
         super(BasicBlock, self).__init__()
         self.conv1 = conv3x3(inplanes, planes, stride, dilation)
-        # build_norm_layer return: (norm_name, norm_layer)
-        self.norm1, norm1 = build_norm_layer(normalize, planes, postfix=1)
-        self.norm2, norm2 = build_norm_layer(normalize, planes, postfix=2)
-        self.add_module(self.norm1, norm1)
-        self.add_module(self.norm2, norm2)
+        self.norm1_name, norm1 = build_norm_layer(normalize, planes, postfix=1)
+        self.norm2_name, norm2 = build_norm_layer(normalize, planes, postfix=2)
+        self.add_module(self.norm1_name, norm1)
+        self.add_module(self.norm2_name, norm2)
@@ -48,15 +47,23 @@ class BasicBlock(nn.Module):
         self.dilation = dilation
         assert not with_cp

+    @property
+    def norm1(self):
+        return getattr(self, self.norm1_name)
+
+    @property
+    def norm2(self):
+        return getattr(self, self.norm2_name)
+
     def forward(self, x):
         identity = x

         out = self.conv1(x)
-        out = getattr(self, self.norm1)(out)
+        out = self.norm1(out)
         out = self.relu(out)

         out = self.conv2(out)
-        out = getattr(self, self.norm2)(out)
+        out = self.norm2(out)

         if self.downsample is not None:
             identity = self.downsample(x)
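Note on the pattern introduced above: build_norm_layer returns a (name, layer) pair, the block keeps only the name in norm1_name / norm2_name, registers the layer under that name with add_module, and exposes it again through a read-only @property, so forward() can simply call self.norm1(out). Below is a minimal self-contained sketch of the same idea; the helper is an illustrative stand-in for the repo's build_norm_layer, and the BN/GN choices are assumptions rather than the actual implementation.

    import torch
    import torch.nn as nn

    def build_norm_layer_sketch(cfg, num_features, postfix=''):
        # Stand-in for mmdet's build_norm_layer: returns (norm_name, norm_layer).
        if cfg['type'] == 'GN':
            return 'gn{}'.format(postfix), nn.GroupNorm(cfg['num_groups'], num_features)
        return 'bn{}'.format(postfix), nn.BatchNorm2d(num_features)

    class TinyBlock(nn.Module):
        def __init__(self, planes, normalize=dict(type='BN')):
            super(TinyBlock, self).__init__()
            self.conv1 = nn.Conv2d(planes, planes, 3, padding=1, bias=False)
            # Keep only the name; register the layer under that name so the
            # state_dict key carries the norm type (e.g. 'bn1.weight' or 'gn1.weight').
            self.norm1_name, norm1 = build_norm_layer_sketch(normalize, planes, postfix=1)
            self.add_module(self.norm1_name, norm1)
            self.relu = nn.ReLU(inplace=True)

        @property
        def norm1(self):
            # The property hides the indirection: self.norm1(x) instead of
            # getattr(self, self.norm1_name)(x).
            return getattr(self, self.norm1_name)

        def forward(self, x):
            return self.relu(self.norm1(self.conv1(x)))

    block = TinyBlock(8, normalize=dict(type='GN', num_groups=4))
    print(list(block.state_dict().keys()))        # ['conv1.weight', 'gn1.weight', 'gn1.bias']
    print(block(torch.randn(1, 8, 16, 16)).shape)

One consequence of registering the norm by name instead of a fixed attribute is that checkpoints keep distinct parameter keys for BN-based and GN-based backbones.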
@@ -108,14 +115,14 @@ class Bottleneck(nn.Module):
             dilation=dilation,
             bias=False)
-        # build_norm_layer return: (norm_name, norm_layer)
-        self.norm1, norm1 = build_norm_layer(normalize, planes, postfix=1)
-        self.norm2, norm2 = build_norm_layer(normalize, planes, postfix=2)
-        self.norm3, norm3 = build_norm_layer(normalize, planes * self.expansion,
-                                             postfix=3)
-        self.add_module(self.norm1, norm1)
-        self.add_module(self.norm2, norm2)
-        self.add_module(self.norm3, norm3)
+        self.norm1_name, norm1 = build_norm_layer(normalize, planes, postfix=1)
+        self.norm2_name, norm2 = build_norm_layer(normalize, planes, postfix=2)
+        self.norm3_name, norm3 = build_norm_layer(
+            normalize, planes * self.expansion, postfix=3)
+        self.add_module(self.norm1_name, norm1)
+        self.add_module(self.norm2_name, norm2)
+        self.add_module(self.norm3_name, norm3)
         self.conv3 = nn.Conv2d(
             planes, planes * self.expansion, kernel_size=1, bias=False)
@@ -126,21 +133,33 @@ class Bottleneck(nn.Module):
         self.with_cp = with_cp
         self.normalize = normalize

+    @property
+    def norm1(self):
+        return getattr(self, self.norm1_name)
+
+    @property
+    def norm2(self):
+        return getattr(self, self.norm2_name)
+
+    @property
+    def norm3(self):
+        return getattr(self, self.norm3_name)
+
     def forward(self, x):

         def _inner_forward(x):
             identity = x

             out = self.conv1(x)
-            out = getattr(self, self.norm1)(out)
+            out = self.norm1(out)
             out = self.relu(out)

             out = self.conv2(out)
-            out = getattr(self, self.norm2)(out)
+            out = self.norm2(out)
             out = self.relu(out)

             out = self.conv3(out)
-            out = getattr(self, self.norm3)(out)
+            out = self.norm3(out)

             if self.downsample is not None:
                 identity = self.downsample(x)
@@ -293,17 +312,21 @@ class ResNet(nn.Module):
         self.feat_dim = self.block.expansion * 64 * 2**(
             len(self.stage_blocks) - 1)

+    @property
+    def norm1(self):
+        return getattr(self, self.norm1_name)
+
     def _make_stem_layer(self):
         self.conv1 = nn.Conv2d(
             3, 64, kernel_size=7, stride=2, padding=3, bias=False)
-        self.stem_norm, stem_norm = build_norm_layer(self.normalize, 64)
-        self.add_module(self.stem_norm, stem_norm)
+        self.norm1_name, norm1 = build_norm_layer(self.normalize, 64, postfix=1)
+        self.add_module(self.norm1_name, norm1)
         self.relu = nn.ReLU(inplace=True)
         self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

         if self.frozen_stages >= 0:
-            for m in [self.conv1, stem_norm]:
+            for m in [self.conv1, self.norm1]:
                 for param in m.parameters():
                     param.requires_grad = False
@@ -327,15 +350,16 @@ class ResNet(nn.Module):
             # zero init for last norm layer https://arxiv.org/abs/1706.02677
             if self.zero_init_residual:
                 for m in self.modules():
-                    if isinstance(m, (Bottleneck, BasicBlock)):
-                        last_norm = getattr(m, m.norm_names[-1])
-                        constant_init(last_norm, 0)
+                    if isinstance(m, Bottleneck):
+                        constant_init(m.norm3, 0)
+                    elif isinstance(m, BasicBlock):
+                        constant_init(m.norm2, 0)
         else:
             raise TypeError('pretrained must be a str or None')

     def forward(self, x):
         x = self.conv1(x)
-        x = getattr(self, self.stem_norm)(x)
+        x = self.norm1(x)
         x = self.relu(x)
         x = self.maxpool(x)
         outs = []
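The zero_init_residual branch above keeps the trick from https://arxiv.org/abs/1706.02677: the last norm layer of each residual block is initialised to zero so the block starts out as (approximately) an identity mapping; after this commit that layer is reached through the new norm2/norm3 properties instead of a norm_names list. constant_init comes from mmcv; the following is a hedged equivalent with plain PyTorch initialisers, assuming blocks that expose norm2/norm3 as in this file.

    import torch.nn as nn

    def zero_init_last_norm(model, bottleneck_cls, basicblock_cls):
        # mmcv's constant_init(layer, 0) sets the affine weight to 0 (bias defaults to 0);
        # this sketch does the same with torch.nn.init.
        for m in model.modules():
            if isinstance(m, bottleneck_cls):
                nn.init.constant_(m.norm3.weight, 0)
                nn.init.constant_(m.norm3.bias, 0)
            elif isinstance(m, basicblock_cls):
                nn.init.constant_(m.norm2.weight, 0)
                nn.init.constant_(m.norm2.bias, 0)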
mmdet/models/utils/conv_module.py
@@ -53,8 +53,8 @@ class ConvModule(nn.Module):
         if self.with_norm:
             norm_channels = out_channels if self.activate_last else in_channels
-            self.norm, norm = build_norm_layer(normalize, norm_channels)
-            self.add_module(self.norm, norm)
+            self.norm_name, norm = build_norm_layer(normalize, norm_channels)
+            self.add_module(self.norm_name, norm)

         if self.with_activatation:
             assert activation in ['relu'], 'Only ReLU supported.'
@@ -64,6 +64,10 @@ class ConvModule(nn.Module):
         # Default using msra init
         self.init_weights()

+    @property
+    def norm(self):
+        return getattr(self, self.norm_name)
+
     def init_weights(self):
         nonlinearity = 'relu' if self.activation is None else self.activation
         kaiming_init(self.conv, nonlinearity=nonlinearity)
@@ -74,12 +78,12 @@ class ConvModule(nn.Module):
         if self.activate_last:
             x = self.conv(x)
             if norm and self.with_norm:
-                x = getattr(self, self.norm)(x)
+                x = self.norm(x)
             if activate and self.with_activatation:
                 x = self.activate(x)
         else:
             if norm and self.with_norm:
-                x = getattr(self, self.norm)(x)
+                x = self.norm(x)
             if activate and self.with_activatation:
                 x = self.activate(x)
             x = self.conv(x)
mmdet/models/utils/norm.py
@@ -11,13 +11,22 @@ norm_cfg = {
 def build_norm_layer(cfg, num_features, postfix=''):
-    """ cfg should contain:
+    """ Build normalization layer
+
+    Args:
+        cfg (dict): cfg should contain:
             type (str): identify norm layer type.
             layer args: args needed to instantiate a norm layer.
             frozen (bool): [optional] whether stop gradient updates
                 of norm layer, it is helpful to set frozen mode
                 in backbone's norms.
+        num_features (int): number of channels from input
+        postfix (int, str): appended into norm abbreation to
+            create named layer.
+
+    Returns:
+        name (str): abbreation + postfix
+        layer (nn.Module): created norm layer
     """
     assert isinstance(cfg, dict) and 'type' in cfg
     cfg_ = cfg.copy()
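Per the expanded docstring, build_norm_layer takes a config dict (with 'type', the layer args, and an optional 'frozen' flag), the input channel count, and an optional postfix, and returns a (name, layer) pair. A small usage sketch follows; the import path is inferred from this file's location, and the 'GN'/'num_groups' keys are assumptions based on the surrounding code rather than shown in this hunk.

    import torch
    from mmdet.models.utils.norm import build_norm_layer  # assumed import path

    normalize = dict(type='GN', num_groups=32)             # 'type' selects the norm layer
    name, gn = build_norm_layer(normalize, 256, postfix=1)

    print(name)                                   # norm abbreviation + postfix, e.g. 'gn1'
    print(gn(torch.randn(2, 256, 8, 8)).shape)    # torch.Size([2, 256, 8, 8])

    # Callers in this commit then register the layer under the returned name:
    #     self.norm1_name, norm1 = build_norm_layer(normalize, planes, postfix=1)
    #     self.add_module(self.norm1_name, norm1)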