import torch.nn as nn

norm_cfg = {
    # format: layer_type: (abbreviation, module)
    'BN': ('bn', nn.BatchNorm2d),
    'SyncBN': ('bn', None),
    'GN': ('gn', nn.GroupNorm),
    # and potentially 'SN'
}


def build_norm_layer(cfg, num_features, postfix=''):
    """Build a normalization layer.

    cfg should contain:
        type (str): identify norm layer type.
        layer args: args needed to instantiate a norm layer.
        frozen (bool): [optional] whether to stop gradient updates of the
            norm layer; it is helpful to set frozen mode in a backbone's norms.
    """
    assert isinstance(cfg, dict) and 'type' in cfg
    cfg_ = cfg.copy()

    layer_type = cfg_.pop('type')
    if layer_type not in norm_cfg:
        raise KeyError('Unrecognized norm type {}'.format(layer_type))
    else:
        abbr, norm_layer = norm_cfg[layer_type]
        if norm_layer is None:
            raise NotImplementedError

    assert isinstance(postfix, (int, str))
    name = abbr + str(postfix)

    frozen = cfg_.pop('frozen', False)
    cfg_.setdefault('eps', 1e-5)
    if layer_type != 'GN':
        layer = norm_layer(num_features, **cfg_)
    else:
        # GroupNorm takes the channel count as a keyword and requires num_groups.
        assert 'num_groups' in cfg_
        layer = norm_layer(num_channels=num_features, **cfg_)

    if frozen:
        for param in layer.parameters():
            param.requires_grad = False

    return name, layer
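
A minimal usage sketch (the call pattern below is an assumption for illustration, not taken from the original source): build a GroupNorm layer for a 256-channel feature map, and a frozen BatchNorm layer whose parameters stop receiving gradients.

# hypothetical usage example, assuming build_norm_layer as defined above
gn_name, gn = build_norm_layer(dict(type='GN', num_groups=32), 256, postfix=1)
# gn_name == 'gn1'; gn is nn.GroupNorm(num_groups=32, num_channels=256, eps=1e-5)

bn_name, bn = build_norm_layer(dict(type='BN', frozen=True), 64, postfix=2)
# bn_name == 'bn2'; every parameter of bn now has requires_grad == False

The returned name (abbreviation plus postfix, e.g. 'bn1', 'gn2') is convenient for registering the layer on a module with add_module so that several norm layers in one block get distinct attribute names.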