"""Dilated ResNet"""
import math
import torch.utils.model_zoo as model_zoo
#from .. import nn
import torch.nn as nn

__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
           'resnet152', 'BasicBlock', 'Bottleneck']

model_urls = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}


def conv3x3(in_planes, out_planes, stride=1):
    "3x3 convolution with padding"
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=1, bias=False)


class BasicBlock(nn.Module):
    """ResNet BasicBlock
    """
    expansion = 1
    def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, first_dilation=1,
                 norm_layer=None):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride,
                               padding=dilation, dilation=dilation, bias=False)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=False)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1,
                               padding=first_dilation, dilation=first_dilation, bias=False)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out


class Bottleneck(nn.Module):
    """ResNet Bottleneck
    """
    # pylint: disable=unused-argument
    expansion = 4
    def __init__(self, inplanes, planes, stride=1, dilation=1,
                 downsample=None, first_dilation=1, norm_layer=None):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = norm_layer(planes)
        self.conv2 = nn.Conv2d(
            planes, planes, kernel_size=3, stride=stride,
            padding=dilation, dilation=dilation, bias=False)
        self.bn2 = norm_layer(planes)
        self.conv3 = nn.Conv2d(
            planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = norm_layer(planes * 4)
        self.relu = nn.ReLU(inplace=False)
        self.downsample = downsample
        self.dilation = dilation
        self.stride = stride

    def _sum_each(self, x, y):
        assert(len(x) == len(y))
        z = []
        for i in range(len(x)):
            z.append(x[i]+y[i])
        return z

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out


class ResNet(nn.Module):
    """Dilated Pre-trained ResNet Model, which preduces the stride of 8 featuremaps at conv5.

    Reference:

        - He, Kaiming, et al. "Deep residual learning for image recognition." Proceedings of the IEEE conference on computer vision and pattern recognition. 2016.

        - Yu, Fisher, and Vladlen Koltun. "Multi-scale context aggregation by dilated convolutions." ICLR 2016.
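
    A minimal construction sketch (nn.BatchNorm2d is used here only as an
    example norm_layer; any compatible normalization layer can be passed)::

        >>> model = ResNet(Bottleneck, [3, 4, 6, 3], norm_layer=nn.BatchNorm2d)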
    """
    # pylint: disable=unused-variable
    def __init__(self, block, layers, num_classes=1000, norm_layer=None):
        self.inplanes = 64
        super(ResNet, self).__init__()
        if norm_layer is None:
            # default to the standard BatchNorm2d so the model can also be
            # built without explicitly passing a normalization layer
            norm_layer = nn.BatchNorm2d
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = norm_layer(64)
        self.relu = nn.ReLU(inplace=False)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0], norm_layer=norm_layer)
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_layer=norm_layer)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2, norm_layer=norm_layer)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4, norm_layer=norm_layer)
        self.avgpool = nn.AvgPool2d(7)
        self.fc = nn.Linear(512 * block.expansion, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, norm_layer):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1, dilation=1, norm_layer=None):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                norm_layer(planes * block.expansion),
            )

        layers = []
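        # The first block of a stage is built with half of the requested
        # dilation (dilation=1 when the stage dilation is 2, dilation=2 when
        # it is 4), with first_dilation carrying the full rate; the remaining
        # blocks appended below use the full dilation throughout.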
        if dilation == 1 or dilation == 2:
            layers.append(block(self.inplanes, planes, stride, dilation=1,
                                downsample=downsample, first_dilation=dilation, norm_layer=norm_layer))
        elif dilation == 4:
            layers.append(block(self.inplanes, planes, stride, dilation=2,
                                downsample=downsample, first_dilation=dilation, norm_layer=norm_layer))
        else:
            raise RuntimeError("=> unknown dilation size: {}".format(dilation))

        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes, dilation=dilation, first_dilation=dilation,
                                norm_layer=norm_layer))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)

        return x


def resnet18(pretrained=False, **kwargs):
    """Constructs a ResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['resnet18']))
    return model


def resnet34(pretrained=False, **kwargs):
    """Constructs a ResNet-34 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))
    return model


def resnet50(pretrained=False, **kwargs):
    """Constructs a ResNet-50 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['resnet50']))
    return model


def resnet101(pretrained=False, **kwargs):
    """Constructs a ResNet-101 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['resnet101']))
    return model


def resnet152(pretrained=False, **kwargs):
    """Constructs a ResNet-152 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['resnet152']))
    return model
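

if __name__ == '__main__':
    # Minimal sanity-check sketch (not part of the original API): build a
    # dilated ResNet-50 and verify that conv5 (layer4) produces stride-8
    # feature maps, i.e. 224 / 8 = 28. nn.BatchNorm2d is passed as norm_layer
    # purely for illustration; any compatible normalization layer works.
    # Only the backbone is run here, since the classification head
    # (AvgPool2d(7) + fc) expects the 7x7 conv5 maps of a non-dilated network.
    import torch

    net = resnet50(pretrained=False, norm_layer=nn.BatchNorm2d)
    x = torch.randn(1, 3, 224, 224)
    x = net.maxpool(net.relu(net.bn1(net.conv1(x))))
    x = net.layer4(net.layer3(net.layer2(net.layer1(x))))
    print(x.shape)  # expected: torch.Size([1, 2048, 28, 28])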