"...text-generation-inference.git" did not exist on "55bd4fed7da83a566dca08b0bb29dbc5929a90eb"
Unverified Commit 371a2172 authored by ZhangShilong, committed by GitHub

fix bug of convmodule (#889)

* fix bug of convmodule

* fix unit test

* remove assert
parent 97730c23
@@ -146,6 +146,8 @@ class ConvModule(nn.Module):
                 norm_channels = in_channels
             self.norm_name, norm = build_norm_layer(norm_cfg, norm_channels)
             self.add_module(self.norm_name, norm)
+        else:
+            self.norm_name = None
 
         # build activation layer
         if self.with_activation:
@@ -162,7 +164,10 @@ class ConvModule(nn.Module):
 
     @property
     def norm(self):
-        return getattr(self, self.norm_name)
+        if self.norm_name:
+            return getattr(self, self.norm_name)
+        else:
+            return None
 
     def init_weights(self):
         # 1. It is mainly for customized conv layers with their own
@@ -75,7 +75,7 @@ def test_conv_module():
     assert conv.with_activation
     assert hasattr(conv, 'activate')
     assert not conv.with_norm
-    assert not hasattr(conv, 'norm')
+    assert conv.norm is None
     x = torch.rand(1, 3, 256, 256)
     output = conv(x)
     assert output.shape == (1, 8, 255, 255)
@@ -83,7 +83,7 @@ def test_conv_module():
     # conv
     conv = ConvModule(3, 8, 2, act_cfg=None)
     assert not conv.with_norm
-    assert not hasattr(conv, 'norm')
+    assert conv.norm is None
     assert not conv.with_activation
     assert not hasattr(conv, 'activate')
     x = torch.rand(1, 3, 256, 256)
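For context, a minimal sketch of the behavior this patch establishes, not part of the commit itself. The import path mmcv.cnn is an assumption; adjust it to whichever module exposes ConvModule in this repository.

import torch
from mmcv.cnn import ConvModule  # assumed import path

# Without norm_cfg, __init__ now sets self.norm_name = None, so the `norm`
# property returns None instead of raising AttributeError (before this fix,
# norm_name was never assigned when no norm layer was built).
conv = ConvModule(3, 8, 2, act_cfg=None)
assert not conv.with_norm
assert conv.norm is None

# With norm_cfg, norm_name still refers to the registered norm submodule.
conv_bn = ConvModule(3, 8, 2, norm_cfg=dict(type='BN'))
assert conv_bn.with_norm
assert conv_bn.norm is not None  # e.g. a BatchNorm2d instance

x = torch.rand(1, 3, 256, 256)
assert conv(x).shape == (1, 8, 255, 255)  # kernel 2, stride 1, no padding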