Unverified Commit 6efefa27 authored by Kai Chen, committed by GitHub

Merge pull request #20 from open-mmlab/dev

Initial public release
parents 2cf13281 54b54d88
from .fcn_mask_head import FCNMaskHead
__all__ = ['FCNMaskHead']
from .fpn import FPN
__all__ = ['FPN']
from .single_level import SingleRoIExtractor
__all__ = ['SingleRoIExtractor']
from .rpn_head import RPNHead
__all__ = ['RPNHead']
from .conv_module import ConvModule
from .norm import build_norm_layer
from .weight_init import xavier_init, normal_init, uniform_init, kaiming_init
__all__ = [
'ConvModule', 'build_norm_layer', 'xavier_init', 'normal_init',
'uniform_init', 'kaiming_init'
]
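These re-exports let downstream code pull everything from the package in one statement; a one-line sketch (the mmdet.models.utils package path is an assumption based on the repository layout, not shown in this diff):

from mmdet.models.utils import ConvModule, build_norm_layer, kaiming_init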
import torch.nn as nn

# Supported normalization layers; SyncBN and GN are placeholders that are
# not implemented in this release.
norm_cfg = {'BN': nn.BatchNorm2d, 'SyncBN': None, 'GN': None}


def build_norm_layer(cfg, num_features):
    # cfg must be a dict with a 'type' key; the remaining keys are passed
    # to the layer constructor as keyword arguments.
    assert isinstance(cfg, dict) and 'type' in cfg
    cfg_ = cfg.copy()
    cfg_.setdefault('eps', 1e-5)
    layer_type = cfg_.pop('type')
    if layer_type not in norm_cfg:
        raise KeyError('Unrecognized norm type {}'.format(layer_type))
    elif norm_cfg[layer_type] is None:
        raise NotImplementedError
    return norm_cfg[layer_type](num_features, **cfg_)
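A minimal usage sketch, not part of the commit, assuming build_norm_layer above is in scope:

import torch

# Build a BatchNorm2d over 64 channels with a custom eps.
bn = build_norm_layer(dict(type='BN', eps=1e-3), num_features=64)
x = torch.randn(2, 64, 8, 8)
y = bn(x)  # y has the same shape as x
# build_norm_layer(dict(type='GN'), 64) would raise NotImplementedError,
# since GN maps to None in norm_cfg above.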
import torch.nn as nn


def xavier_init(module, gain=1, bias=0, distribution='normal'):
    assert distribution in ['uniform', 'normal']
    if distribution == 'uniform':
        nn.init.xavier_uniform_(module.weight, gain=gain)
    else:
        nn.init.xavier_normal_(module.weight, gain=gain)
    # Conv/Linear layers built with bias=False still have a `bias`
    # attribute (set to None), so check for None before initializing.
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)


def normal_init(module, mean=0, std=1, bias=0):
    nn.init.normal_(module.weight, mean, std)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)


def uniform_init(module, a=0, b=1, bias=0):
    nn.init.uniform_(module.weight, a, b)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)


def kaiming_init(module,
                 mode='fan_out',
                 nonlinearity='relu',
                 bias=0,
                 distribution='normal'):
    assert distribution in ['uniform', 'normal']
    if distribution == 'uniform':
        nn.init.kaiming_uniform_(
            module.weight, mode=mode, nonlinearity=nonlinearity)
    else:
        nn.init.kaiming_normal_(
            module.weight, mode=mode, nonlinearity=nonlinearity)
    if hasattr(module, 'bias') and module.bias is not None:
        nn.init.constant_(module.bias, bias)
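A short sketch of how these helpers are typically applied (illustrative only, not part of the commit; module names are arbitrary):

import torch.nn as nn

conv = nn.Conv2d(3, 16, kernel_size=3)
kaiming_init(conv)             # fan_out / relu defaults suit ReLU convs
fc = nn.Linear(256, 81)
normal_init(fc, std=0.01)      # small-std Gaussian, common for heads
conv_nb = nn.Conv2d(3, 16, kernel_size=3, bias=False)
xavier_init(conv_nb, distribution='uniform')  # bias is None, left alone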