"""Dilated ResNet"""
import math

import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo

from ..nn import GlobalAvgPool2d
from ..models.model_store import get_model_file

__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
           'resnet152', 'BasicBlock', 'Bottleneck']

# Torchvision publishes downloadable weights only for the BasicBlock variants;
# the Bottleneck variants (resnet50/101/152) load weights via get_model_file().
model_urls = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
}


def conv3x3(in_planes, out_planes, stride=1):
    """Return a bias-free 3x3 convolution whose padding of 1 keeps the
    spatial size unchanged at stride 1."""
    return nn.Conv2d(in_planes, out_planes,
                     kernel_size=3, stride=stride, padding=1, bias=False)


class BasicBlock(nn.Module):
    """ResNet BasicBlock: two 3x3 convolutions with a residual connection.

    Parameters
    ----------
    inplanes : int
        Number of input channels.
    planes : int
        Number of output channels (``expansion`` is 1 for this block).
    stride : int
        Stride of the first convolution (spatial downsampling factor).
    dilation : int
        Dilation (and matching padding) of the first convolution.
    downsample : nn.Module or None
        Projection applied to the identity branch when the shape changes.
    previous_dilation : int
        Dilation of the second convolution (the rate used by the previous
        stage, so receptive fields transition smoothly).
    norm_layer : callable
        Normalization layer constructor, e.g. ``nn.BatchNorm2d``.
        NOTE(review): the default of None crashes on use — callers must
        always pass one; kept as-is for interface compatibility.
    """
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None,
                 previous_dilation=1, norm_layer=None):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride,
                               padding=dilation, dilation=dilation, bias=False)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1,
                               padding=previous_dilation,
                               dilation=previous_dilation, bias=False)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        # Project the identity branch when channels/stride changed.
        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out


class Bottleneck(nn.Module):
    """ResNet Bottleneck: 1x1 reduce -> 3x3 (possibly dilated) -> 1x1 expand,
    with a residual connection. Output channels are ``planes * 4``.

    Parameters
    ----------
    inplanes : int
        Number of input channels.
    planes : int
        Bottleneck width; output has ``planes * self.expansion`` channels.
    stride : int
        Stride of the middle 3x3 convolution.
    dilation : int
        Dilation (and matching padding) of the middle 3x3 convolution.
    downsample : nn.Module or None
        Projection applied to the identity branch when the shape changes.
    previous_dilation : int
        Accepted for signature parity with BasicBlock; unused here.
    norm_layer : callable
        Normalization layer constructor, e.g. ``nn.BatchNorm2d``;
        must not be None.
    """
    # pylint: disable=unused-argument
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, dilation=1,
                 downsample=None, previous_dilation=1, norm_layer=None):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = norm_layer(planes)
        self.conv2 = nn.Conv2d(
            planes, planes, kernel_size=3, stride=stride,
            padding=dilation, dilation=dilation, bias=False)
        self.bn2 = norm_layer(planes)
        self.conv3 = nn.Conv2d(
            planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = norm_layer(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.dilation = dilation
        self.stride = stride

    def _sum_each(self, x, y):
        """Element-wise sum of two equal-length lists of tensors.

        Presumably used when activations are carried as per-device lists
        (sync-BN style) — TODO confirm against callers.
        """
        assert(len(x) == len(y))
        z = []
        for i in range(len(x)):
            z.append(x[i]+y[i])
        return z

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        # Project the identity branch when channels/stride changed.
        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out


class ResNet(nn.Module):
    """Dilated Pre-trained ResNet Model, which produces the stride of 8 featuremaps at conv5.

    Parameters
    ----------
    block : Block
        Class for the residual block. Options are BasicBlockV1, BottleneckV1.
    layers : list of int
        Numbers of layers in each block
    num_classes : int, default 1000
        Number of classification classes.
    dilated : bool, default False
        Applying dilation strategy to pretrained ResNet yielding a stride-8 model,
        typically used in Semantic Segmentation.
    multi_grid : bool, default False
        Use the multi-grid dilation pattern (rates 4/8/16) in layer4 when
        ``dilated`` is also set.
    deep_base : bool, default True
        Replace the single 7x7 stem convolution with three 3x3 convolutions.
    norm_layer : object
        Normalization layer used in backbone network (default: :class:`nn.BatchNorm2d`;
        for Synchronized Cross-GPU BatchNormalization).

    Reference:

        - He, Kaiming, et al. "Deep residual learning for image recognition." Proceedings of the IEEE conference on computer vision and pattern recognition. 2016.

        - Yu, Fisher, and Vladlen Koltun. "Multi-scale context aggregation by dilated convolutions."
    """
    # pylint: disable=unused-variable
    def __init__(self, block, layers, num_classes=1000, dilated=False, multi_grid=False,
                 deep_base=True, norm_layer=nn.BatchNorm2d):
        # Stem output width depends on the stem variant chosen below.
        self.inplanes = 128 if deep_base else 64
        super(ResNet, self).__init__()
        if deep_base:
            # Three stacked 3x3 convs (stride 2 overall) instead of one 7x7.
            self.conv1 = nn.Sequential(
                nn.Conv2d(3, 64, kernel_size=3, stride=2, padding=1, bias=False),
                norm_layer(64),
                nn.ReLU(inplace=True),
                nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1, bias=False),
                norm_layer(64),
                nn.ReLU(inplace=True),
                nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1, bias=False),
            )
        else:
            self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                                   bias=False)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0], norm_layer=norm_layer)
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_layer=norm_layer)
        if dilated:
            # Replace striding with dilation in the last two stages,
            # keeping the output at stride 8 for dense prediction.
            self.layer3 = self._make_layer(block, 256, layers[2], stride=1,
                                           dilation=2, norm_layer=norm_layer)
            if multi_grid:
                self.layer4 = self._make_layer(block, 512, layers[3], stride=1,
                                               dilation=4, norm_layer=norm_layer,
                                               multi_grid=True)
            else:
                self.layer4 = self._make_layer(block, 512, layers[3], stride=1,
                                               dilation=4, norm_layer=norm_layer)
        else:
            self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                           norm_layer=norm_layer)
            self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
                                           norm_layer=norm_layer)
        self.avgpool = GlobalAvgPool2d()
        self.fc = nn.Linear(512 * block.expansion, num_classes)

        # He-style initialization for convolutions; unit scale / zero shift
        # for the normalization layers.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, norm_layer):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1, dilation=1, norm_layer=None, multi_grid=False):
        """Build one ResNet stage of ``blocks`` residual blocks.

        The first block carries the stride and (if needed) the projection
        shortcut; dilation 4 is ramped through 2 on the first block.
        Raises RuntimeError for unsupported dilation sizes.
        """
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                norm_layer(planes * block.expansion),
            )

        layers = []
        # Multi-grid rates for layer4; assumes at most 3 blocks in that
        # stage (layers[3] == 3 for the provided architectures).
        multi_dilations = [4, 8, 16]
        if multi_grid:
            layers.append(block(self.inplanes, planes, stride, dilation=multi_dilations[0],
                                downsample=downsample, previous_dilation=dilation, norm_layer=norm_layer))
        elif dilation == 1 or dilation == 2:
            layers.append(block(self.inplanes, planes, stride, dilation=1,
                                downsample=downsample, previous_dilation=dilation, norm_layer=norm_layer))
        elif dilation == 4:
            layers.append(block(self.inplanes, planes, stride, dilation=2,
                                downsample=downsample, previous_dilation=dilation, norm_layer=norm_layer))
        else:
            raise RuntimeError("=> unknown dilation size: {}".format(dilation))

        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            if multi_grid:
                layers.append(block(self.inplanes, planes, dilation=multi_dilations[i],
                                    previous_dilation=dilation, norm_layer=norm_layer))
            else:
                layers.append(block(self.inplanes, planes, dilation=dilation, previous_dilation=dilation,
                                    norm_layer=norm_layer))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)

        return x


def resnet18(pretrained=False, **kwargs):
    """Constructs a ResNet-18 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
    if pretrained:
        state = model_zoo.load_url(model_urls['resnet18'])
        model.load_state_dict(state)
    return model


def resnet34(pretrained=False, **kwargs):
    """Constructs a ResNet-34 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
    """
    model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
    if pretrained:
        state = model_zoo.load_url(model_urls['resnet34'])
        model.load_state_dict(state)
    return model


def resnet50(pretrained=False, root='~/.encoding/models', **kwargs):
    """Constructs a ResNet-50 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        root (str): directory holding the cached pretrained weight files
    """
    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
    if pretrained:
        # strict=False: tolerate keys absent from the checkpoint
        # (e.g. the classifier head when used as a backbone).
        model.load_state_dict(torch.load(
            get_model_file('resnet50', root=root)), strict=False)
    return model


def resnet101(pretrained=False, root='~/.encoding/models', **kwargs):
    """Constructs a ResNet-101 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        root (str): directory holding the cached pretrained weight files
    """
    model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
    if pretrained:
        # strict=False: tolerate keys absent from the checkpoint
        # (e.g. the classifier head when used as a backbone).
        model.load_state_dict(torch.load(
            get_model_file('resnet101', root=root)), strict=False)
    return model


def resnet152(pretrained=False, root='~/.encoding/models', **kwargs):
    """Constructs a ResNet-152 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        root (str): directory holding the cached pretrained weight files
    """
    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
    if pretrained:
        # strict=False: tolerate keys absent from the checkpoint
        # (e.g. the classifier head when used as a backbone).
        model.load_state_dict(torch.load(
            get_model_file('resnet152', root=root)), strict=False)
    return model