Unverified Commit 7cfc839e authored by Jerry Jiarui XU's avatar Jerry Jiarui XU Committed by GitHub
Browse files

Add MMSyncBN in registry (#420)

* Add MMSyncBN in registry

* skip mmsyncbn test
parent 50f69e70
...@@ -6,6 +6,7 @@ from torch.autograd.function import once_differentiable ...@@ -6,6 +6,7 @@ from torch.autograd.function import once_differentiable
from torch.nn.modules.module import Module from torch.nn.modules.module import Module
from torch.nn.parameter import Parameter from torch.nn.parameter import Parameter
from mmcv.cnn import NORM_LAYERS
from ..utils import ext_loader from ..utils import ext_loader
ext_module = ext_loader.load_ext('_ext', [ ext_module = ext_loader.load_ext('_ext', [
...@@ -109,9 +110,8 @@ class SyncBatchNormFunction(Function): ...@@ -109,9 +110,8 @@ class SyncBatchNormFunction(Function):
None, None, None, None None, None, None, None
if dist.is_available(): @NORM_LAYERS.register_module(name='MMSyncBN')
class SyncBatchNorm(Module):
class SyncBatchNorm(Module):
def __init__(self, def __init__(self,
num_features, num_features,
...@@ -119,14 +119,14 @@ if dist.is_available(): ...@@ -119,14 +119,14 @@ if dist.is_available():
momentum=0.1, momentum=0.1,
affine=True, affine=True,
track_running_stats=True, track_running_stats=True,
group=dist.group.WORLD): group=None):
super(SyncBatchNorm, self).__init__() super(SyncBatchNorm, self).__init__()
self.num_features = num_features self.num_features = num_features
self.eps = eps self.eps = eps
self.momentum = momentum self.momentum = momentum
self.affine = affine self.affine = affine
self.track_running_stats = track_running_stats self.track_running_stats = track_running_stats
self.group = group self.group = dist.group.WORLD if group is None else group
self.group_size = dist.get_world_size(group) self.group_size = dist.get_world_size(group)
if self.affine: if self.affine:
self.weight = Parameter(torch.Tensor(num_features)) self.weight = Parameter(torch.Tensor(num_features))
...@@ -177,8 +177,8 @@ if dist.is_available(): ...@@ -177,8 +177,8 @@ if dist.is_available():
if self.training or not self.track_running_stats: if self.training or not self.track_running_stats:
return SyncBatchNormFunction.apply(input, self.running_mean, return SyncBatchNormFunction.apply(input, self.running_mean,
self.running_var, self.running_var, self.weight,
self.weight, self.bias, self.bias,
exponential_average_factor, exponential_average_factor,
self.eps, self.group, self.eps, self.group,
self.group_size) self.group_size)
...@@ -196,12 +196,3 @@ if dist.is_available(): ...@@ -196,12 +196,3 @@ if dist.is_available():
s += f'track_running_stats={self.track_running_stats}, ' s += f'track_running_stats={self.track_running_stats}, '
s += f'group_size={self.group_size})' s += f'group_size={self.group_size})'
return s return s
else:
class SyncBatchNorm(Module):
def __init__(self, *args, **kwargs):
raise NotImplementedError(
'SyncBatchNorm is not supported in this OS since the '
'distributed package is not available')
...@@ -135,6 +135,8 @@ def test_build_norm_layer(): ...@@ -135,6 +135,8 @@ def test_build_norm_layer():
'IN3d': 'in', 'IN3d': 'in',
} }
for type_name, module in NORM_LAYERS.module_dict.items(): for type_name, module in NORM_LAYERS.module_dict.items():
if type_name == 'MMSyncBN': # skip MMSyncBN
continue
for postfix in ['_test', 1]: for postfix in ['_test', 1]:
cfg = dict(type=type_name) cfg = dict(type=type_name)
if type_name == 'GN': if type_name == 'GN':
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment