Unverified Commit 64928acc authored by Kai Chen, committed by GitHub

Rename normalize to norm_cfg (#637)

* rename normalize to norm_cfg

* update configs

* Update resnet.py
parent 960e614c
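The rename is API-facing, so downstream configs change along with the module signatures in the diff below. The following is a minimal, hypothetical config fragment illustrating that change; the surrounding keys and channel numbers are assumed for illustration and are not taken from this commit.

# Hypothetical FPN neck entry in a model config (values are illustrative only).

# Before this commit:
neck = dict(
    type='FPN',
    in_channels=[256, 512, 1024, 2048],
    out_channels=256,
    num_outs=5,
    normalize=dict(type='BN', requires_grad=True))

# After this commit, the same setting is spelled norm_cfg:
neck = dict(
    type='FPN',
    in_channels=[256, 512, 1024, 2048],
    out_channels=256,
    num_outs=5,
    norm_cfg=dict(type='BN', requires_grad=True))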
@@ -18,7 +18,7 @@ class FPN(nn.Module):
                  add_extra_convs=False,
                  extra_convs_on_inputs=True,
                  conv_cfg=None,
-                 normalize=None,
+                 norm_cfg=None,
                  activation=None):
         super(FPN, self).__init__()
         assert isinstance(in_channels, list)
@@ -27,7 +27,6 @@ class FPN(nn.Module):
         self.num_ins = len(in_channels)
         self.num_outs = num_outs
         self.activation = activation
-        self.with_bias = normalize is None

         if end_level == -1:
             self.backbone_end_level = self.num_ins
@@ -51,8 +50,7 @@ class FPN(nn.Module):
                 out_channels,
                 1,
                 conv_cfg=conv_cfg,
-                normalize=normalize,
-                bias=self.with_bias,
+                norm_cfg=norm_cfg,
                 activation=self.activation,
                 inplace=False)
             fpn_conv = ConvModule(
@@ -61,8 +59,7 @@ class FPN(nn.Module):
                 3,
                 padding=1,
                 conv_cfg=conv_cfg,
-                normalize=normalize,
-                bias=self.with_bias,
+                norm_cfg=norm_cfg,
                 activation=self.activation,
                 inplace=False)
@@ -83,8 +80,8 @@ class FPN(nn.Module):
                     3,
                     stride=2,
                     padding=1,
-                    normalize=normalize,
-                    bias=self.with_bias,
+                    conv_cfg=conv_cfg,
+                    norm_cfg=norm_cfg,
                     activation=self.activation,
                     inplace=False)
                 self.fpn_convs.append(extra_fpn_conv)
......
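Note that the FPN hunks above also drop the explicit bias=self.with_bias arguments: with ConvModule's new bias='auto' default (see the ConvModule hunks further down), the bias decision moves into ConvModule itself. Below is a small sketch of that rule; resolve_bias is a hypothetical name used only for illustration, the real logic is inline in ConvModule.__init__.

# Sketch of the 'auto' bias rule that replaces FPN's self.with_bias bookkeeping.
def resolve_bias(bias, norm_cfg):
    if bias == 'auto':
        # a conv followed by a norm layer does not need its own bias
        return norm_cfg is None
    return bias

assert resolve_bias('auto', dict(type='BN')) is False
assert resolve_bias('auto', None) is True
assert resolve_bias(True, dict(type='BN')) is True  # explicit values pass through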
@@ -17,13 +17,13 @@ class ResLayer(nn.Module):
                  stride=2,
                  dilation=1,
                  style='pytorch',
-                 normalize=dict(type='BN', requires_grad=True),
+                 norm_cfg=dict(type='BN', requires_grad=True),
                  norm_eval=True,
                  with_cp=False,
                  dcn=None):
         super(ResLayer, self).__init__()
         self.norm_eval = norm_eval
-        self.normalize = normalize
+        self.norm_cfg = norm_cfg
         self.stage = stage
         block, stage_blocks = ResNet.arch_settings[depth]
         stage_block = stage_blocks[stage]
@@ -39,7 +39,7 @@ class ResLayer(nn.Module):
             dilation=dilation,
             style=style,
             with_cp=with_cp,
-            normalize=self.normalize,
+            norm_cfg=self.norm_cfg,
             dcn=dcn)

         self.add_module('layer{}'.format(stage + 1), res_layer)
......
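After the rename, ResLayer callers likewise pass norm_cfg instead of normalize. A minimal, hypothetical config-style sketch of a shared head entry after this commit; the depth, stage, and stride values are assumed purely for illustration.

# Hypothetical shared_head config entry after the rename (values illustrative).
shared_head = dict(
    type='ResLayer',
    depth=50,     # looked up in ResNet.arch_settings
    stage=3,      # builds the module registered as 'layer4' (stage + 1)
    stride=2,
    norm_cfg=dict(type='BN', requires_grad=True),
    norm_eval=True)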
@@ -42,6 +42,27 @@ def build_conv_layer(cfg, *args, **kwargs):
 class ConvModule(nn.Module):
+    """Conv-Norm-Activation block.
+
+    Args:
+        in_channels (int): Same as nn.Conv2d.
+        out_channels (int): Same as nn.Conv2d.
+        kernel_size (int or tuple[int]): Same as nn.Conv2d.
+        stride (int or tuple[int]): Same as nn.Conv2d.
+        padding (int or tuple[int]): Same as nn.Conv2d.
+        dilation (int or tuple[int]): Same as nn.Conv2d.
+        groups (int): Same as nn.Conv2d.
+        bias (bool or str): If specified as `auto`, it will be decided by the
+            norm_cfg. Bias will be set as True if norm_cfg is None, otherwise
+            False.
+        conv_cfg (dict): Config dict for convolution layer.
+        norm_cfg (dict): Config dict for normalization layer.
+        activation (str or None): Activation type, "ReLU" by default.
+        inplace (bool): Whether to use inplace mode for activation.
+        activate_last (bool): Whether to apply the activation layer in the
+            last. (Do not use this flag since the behavior and api may be
+            changed in the future.)
+    """

     def __init__(self,
                  in_channels,
@@ -51,35 +72,42 @@ class ConvModule(nn.Module):
                  padding=0,
                  dilation=1,
                  groups=1,
-                 bias=True,
+                 bias='auto',
                  conv_cfg=None,
-                 normalize=None,
+                 norm_cfg=None,
                  activation='relu',
                  inplace=True,
                  activate_last=True):
         super(ConvModule, self).__init__()
         assert conv_cfg is None or isinstance(conv_cfg, dict)
-        assert normalize is None or isinstance(normalize, dict)
-        self.with_norm = normalize is not None
-        self.with_activatation = activation is not None
-        self.with_bias = bias
+        assert norm_cfg is None or isinstance(norm_cfg, dict)
+        self.conv_cfg = conv_cfg
+        self.norm_cfg = norm_cfg
         self.activation = activation
+        self.inplace = inplace
         self.activate_last = activate_last
+
+        self.with_norm = norm_cfg is not None
+        self.with_activatation = activation is not None
+        # if the conv layer is before a norm layer, bias is unnecessary.
+        if bias == 'auto':
+            bias = False if self.with_norm else True
+        self.with_bias = bias
+
         if self.with_norm and self.with_bias:
             warnings.warn('ConvModule has norm and bias at the same time')

-        self.conv = build_conv_layer(
-            conv_cfg,
+        # build convolution layer
+        self.conv = build_conv_layer(conv_cfg,
             in_channels,
             out_channels,
             kernel_size,
-            stride,
-            padding,
-            dilation,
-            groups,
+            stride=stride,
+            padding=padding,
+            dilation=dilation,
+            groups=groups,
             bias=bias)
+
+        # export the attributes of self.conv to a higher level for convenience
         self.in_channels = self.conv.in_channels
         self.out_channels = self.conv.out_channels
         self.kernel_size = self.conv.kernel_size
@@ -90,17 +118,21 @@ class ConvModule(nn.Module):
         self.output_padding = self.conv.output_padding
         self.groups = self.conv.groups

+        # build normalization layers
         if self.with_norm:
             norm_channels = out_channels if self.activate_last else in_channels
-            self.norm_name, norm = build_norm_layer(normalize, norm_channels)
+            self.norm_name, norm = build_norm_layer(norm_cfg, norm_channels)
             self.add_module(self.norm_name, norm)

+        # build activation layer
         if self.with_activatation:
-            assert activation in ['relu'], 'Only ReLU supported.'
+            if self.activation not in ['relu']:
+                raise ValueError('{} is currently not supported.'.format(
+                    self.activation))
             if self.activation == 'relu':
                 self.activate = nn.ReLU(inplace=inplace)

-        # Default using msra init
+        # Use msra init by default
         self.init_weights()

     @property
@@ -121,6 +153,7 @@ class ConvModule(nn.Module):
             if activate and self.with_activatation:
                 x = self.activate(x)
         else:
+            # WARN: this may be removed or modified
             if norm and self.with_norm:
                 x = self.norm(x)
             if activate and self.with_activatation:
......
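Taken together, the ConvModule changes let callers request normalization via norm_cfg and leave bias at its new 'auto' default. A minimal usage sketch follows, assuming ConvModule is importable from mmdet.models.utils (the import path is an assumption, adjust to wherever the class is exposed in this repository).

import torch
from mmdet.models.utils import ConvModule  # assumed import path

# With norm_cfg set, bias='auto' resolves to False (no redundant conv bias).
conv_bn_relu = ConvModule(
    in_channels=3,
    out_channels=16,
    kernel_size=3,
    padding=1,
    norm_cfg=dict(type='BN'),
    activation='relu')

# Without norm_cfg, bias='auto' resolves to True, as a plain conv would have.
plain_conv = ConvModule(3, 16, 3, padding=1)

x = torch.randn(2, 3, 32, 32)
out = conv_bn_relu(x)   # conv -> BN -> ReLU with the default activate_last=True
print(out.shape)        # torch.Size([2, 16, 32, 32])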