Commit 82e75455 authored by thangvu, committed by ThangVu

revise norm config

parent 55a4feb5
 # model settings
+normalize = dict(
+    type='GN',
+    num_groups=32,
+    frozen=False)
 model = dict(
     type='MaskRCNN',
     pretrained='tools/resnet50-GN.path',
@@ -9,20 +14,13 @@ model = dict(
         out_indices=(0, 1, 2, 3),
         frozen_stages=1,
         style='pytorch',
-        # Note: eval_mode and frozen are required args for backbone
-        normalize=dict(
-            type='GN',
-            num_groups=32,
-            eval_mode=False,
-            frozen=False)),
+        normalize=normalize),
     neck=dict(
         type='FPN',
         in_channels=[256, 512, 1024, 2048],
         out_channels=256,
         num_outs=5,
-        normalize=dict(
-            type='GN',
-            num_groups=32)),
+        normalize=normalize),
     rpn_head=dict(
         type='RPNHead',
         in_channels=256,
@@ -50,9 +48,7 @@ model = dict(
         target_means=[0., 0., 0., 0.],
         target_stds=[0.1, 0.1, 0.2, 0.2],
         reg_class_agnostic=False,
-        normalize=dict(
-            type='GN',
-            num_groups=32)),
+        normalize=normalize),
     mask_roi_extractor=dict(
         type='SingleRoIExtractor',
         roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
@@ -64,9 +60,7 @@ model = dict(
         in_channels=256,
         conv_out_channels=256,
         num_classes=81,
-        normalize=dict(
-            type='GN',
-            num_groups=32)))
+        normalize=normalize))
 # model training and testing settings
 train_cfg = dict(
......
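For reference, the pattern this commit moves to is: define the norm settings once at the top of the config and pass the same dict to every module that accepts a `normalize` argument. A minimal self-contained sketch of the idea (module set trimmed; names follow the config above):

```python
# Shared norm config: switching the whole model between GN and BN
# (or changing num_groups) becomes a one-line edit.
normalize = dict(type='GN', num_groups=32, frozen=False)

model = dict(
    type='MaskRCNN',
    backbone=dict(type='ResNet', depth=50, normalize=normalize),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        num_outs=5,
        normalize=normalize))
```

Note that every module receives a reference to the same dict, so consumers are expected to copy it before popping keys (as `build_norm_layer` does below) rather than mutate it in place.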
@@ -236,11 +236,14 @@ class ResNet(nn.Module):
             the first 1x1 conv layer.
         frozen_stages (int): Stages to be frozen (all param fixed). -1 means
             not freezing any parameters.
-        normalize (dict): dictionary to construct norm layer. Additionally,
-            eval mode and gradent freezing are controlled by
-            eval (bool) and frozen (bool) respectively.
+        normalize (dict): dictionary to construct and configure the norm layer.
+        norm_eval (bool): Whether to set norm layers to eval mode, i.e.,
+            freeze running stats (mean and var). Note: this only has an
+            effect on BatchNorm and its variants.
         with_cp (bool): Use checkpoint or not. Using checkpoint will save some
             memory while slowing down the training speed.
+        zero_init_residual (bool): Whether to use zero init for the last norm
+            layer in resblocks to let them behave as identities.
     """
     arch_settings = {
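For context on the new `norm_eval` flag: the usual way to implement it in backbones with pretrained weights is to override `train()` so BatchNorm layers are put back into eval mode, freezing their running mean/var while the rest of the network trains. A minimal sketch under that assumption; this is not the verbatim code from the commit:

```python
import torch.nn as nn

class NormEvalMixinSketch(nn.Module):
    """Illustrative only: freeze BN running stats when norm_eval is set."""

    def __init__(self, norm_eval=True):
        super(NormEvalMixinSketch, self).__init__()
        self.norm_eval = norm_eval

    def train(self, mode=True):
        super(NormEvalMixinSketch, self).train(mode)
        if mode and self.norm_eval:
            for m in self.modules():
                # eval() stops running-stat updates for BN; GroupNorm has
                # no running stats, hence the docstring's "BatchNorm and
                # its variants" caveat.
                if isinstance(m, nn.BatchNorm2d):
                    m.eval()
        return self
```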
@@ -261,8 +264,8 @@ class ResNet(nn.Module):
                  frozen_stages=-1,
                  normalize=dict(
                      type='BN',
-                     eval_mode=True,
                      frozen=False),
+                 norm_eval=True,
                  with_cp=False,
                  zero_init_residual=True):
         super(ResNet, self).__init__()
@@ -278,11 +281,9 @@ class ResNet(nn.Module):
         assert max(out_indices) < num_stages
         self.style = style
         self.frozen_stages = frozen_stages
-        assert (isinstance(normalize, dict) and 'eval_mode' in normalize
-                and 'frozen' in normalize)
-        self.norm_eval = normalize.pop('eval_mode')
         self.normalize = normalize
         self.with_cp = with_cp
+        self.norm_eval = norm_eval
         self.zero_init_residual = zero_init_residual
         self.block, stage_blocks = self.arch_settings[depth]
         self.stage_blocks = stage_blocks[:num_stages]
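The net effect on the constructor API: `eval_mode` is no longer smuggled into (and popped back out of) the `normalize` dict; it becomes the explicit `norm_eval` argument. Roughly, with illustrative values:

```python
# Assuming: from mmdet.models.backbones import ResNet

# Before this commit: the backbone asserted on, then popped, extra keys.
model = ResNet(
    depth=50,
    normalize=dict(type='BN', eval_mode=True, frozen=False))

# After this commit: the dict only describes the norm layer itself.
model = ResNet(
    depth=50,
    normalize=dict(type='BN', frozen=False),
    norm_eval=True)
```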
@@ -350,7 +351,6 @@ class ResNet(nn.Module):
                 elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                     constant_init(m, 1)
-            # zero init for last norm layer https://arxiv.org/abs/1706.02677
             if self.zero_init_residual:
                 for m in self.modules():
                     if isinstance(m, Bottleneck):
......
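The deleted comment pointed at https://arxiv.org/abs/1706.02677, where zero-initializing the scale of each residual block's last norm layer makes the block start out as an identity mapping (only the shortcut passes through at first). A hedged sketch of that initialization; the helper name is invented, and the real code checks `isinstance(m, Bottleneck)` and zeros `m.norm3`:

```python
import torch.nn as nn

def zero_init_last_norm_sketch(model):
    """Illustrative: make resblocks start as identities (gamma = 0)."""
    for m in model.modules():
        # Stand-in for the isinstance(m, Bottleneck) check in the real code.
        if m.__class__.__name__ == 'Bottleneck':
            last_norm = getattr(m, 'norm3', None)  # the block's last norm
            if last_norm is not None:
                nn.init.constant_(last_norm.weight, 0)
                nn.init.constant_(last_norm.bias, 0)
```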
@@ -122,11 +122,14 @@ class ResNeXt(ResNet):
             the first 1x1 conv layer.
         frozen_stages (int): Stages to be frozen (all param fixed). -1 means
             not freezing any parameters.
-        normalize (dict): dictionary to construct norm layer. Additionally,
-            eval mode and gradent freezing are controlled by
-            eval (bool) and frozen (bool) respectively.
+        normalize (dict): dictionary to construct and configure the norm layer.
+        norm_eval (bool): Whether to set norm layers to eval mode, i.e.,
+            freeze running stats (mean and var). Note: this only has an
+            effect on BatchNorm and its variants.
         with_cp (bool): Use checkpoint or not. Using checkpoint will save some
             memory while slowing down the training speed.
+        zero_init_residual (bool): Whether to use zero init for the last norm
+            layer in resblocks to let them behave as identities.
     """
     arch_settings = {
......
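ResNeXt inherits this constructor from ResNet, so it gets the same `normalize`/`norm_eval` interface for free; a hypothetical GN-flavored backbone config after this change might read:

```python
backbone = dict(
    type='ResNeXt',
    depth=101,
    normalize=dict(type='GN', num_groups=32, frozen=False),
    # norm_eval only matters for BN-style layers with running stats;
    # with GN it is effectively a no-op.
    norm_eval=False)
```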
@@ -31,13 +31,6 @@ def build_norm_layer(cfg, num_features, postfix=''):
     assert isinstance(cfg, dict) and 'type' in cfg
     cfg_ = cfg.copy()
-    # eval_mode is supported and popped out for processing in module
-    # having pretrained weight only (e.g. backbone)
-    # raise an exception if eval_mode is in here
-    if 'eval_mode' in cfg:
-        raise Exception('eval_mode for modules without pretrained weights '
-                        'is not supported')
     layer_type = cfg_.pop('type')
     if layer_type not in norm_cfg:
         raise KeyError('Unrecognized norm type {}'.format(layer_type))
......
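With the `eval_mode` guard gone, `build_norm_layer` is back to a single job: turn a config dict into a named layer instance. A simplified sketch of that contract, assuming a BN/GN-only registry; the real function also handles `frozen`, postfixes for multiple instances, and more norm types:

```python
import torch.nn as nn

norm_registry = {'BN': nn.BatchNorm2d, 'GN': nn.GroupNorm}

def build_norm_layer_sketch(cfg, num_features, postfix=''):
    cfg_ = cfg.copy()              # never mutate the shared config dict
    layer_type = cfg_.pop('type')
    if layer_type not in norm_registry:
        raise KeyError('Unrecognized norm type {}'.format(layer_type))
    cfg_.pop('frozen', None)       # gradient freezing left to the caller here
    name = layer_type.lower() + str(postfix)
    if layer_type == 'GN':
        # GroupNorm takes (num_groups, num_channels), not a feature count.
        layer = nn.GroupNorm(num_channels=num_features, **cfg_)
    else:
        layer = norm_registry[layer_type](num_features, **cfg_)
    return name, layer

# e.g. name, gn = build_norm_layer_sketch(dict(type='GN', num_groups=32), 256)
```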