Commit 22944397 authored by Zhang

v0.2.0

parent 8fbc9bb6
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Hang Zhang
## ECE Department, Rutgers University
## Email: zhang.hang@rutgers.edu
## Copyright (c) 2017
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable

import encoding


def conv3x3(in_planes, out_planes, stride=1):
    """3x3 convolution with padding."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=1, bias=False)
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
class Basicblock(nn.Module):
    """Pre-activation residual block.

    Identity Mappings in Deep Residual Networks
    ref https://arxiv.org/abs/1603.05027
    """
    def __init__(self, inplanes, planes, stride=1,
                 norm_layer=nn.BatchNorm2d):
        super(Basicblock, self).__init__()
        if inplanes != planes or stride != 1:
            self.downsample = True
            self.residual_layer = nn.Conv2d(inplanes, planes,
                                            kernel_size=1, stride=stride)
        else:
            self.downsample = False
        conv_block = []
        conv_block += [norm_layer(inplanes),
                       nn.ReLU(inplace=True),
                       conv3x3(inplanes, planes, stride=stride),
                       norm_layer(planes),
                       nn.ReLU(inplace=True),
                       conv3x3(planes, planes)]
        self.conv_block = nn.Sequential(*conv_block)

    def forward(self, input):
        if self.downsample:
            residual = self.residual_layer(input)
        else:
            residual = input
        return residual + self.conv_block(input)
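
# Hedged sketch (not in the original commit): a quick shape check showing that
# a strided Basicblock changes the channel count and halves the spatial size;
# the concrete tensor sizes here are illustrative assumptions.
def _check_basicblock():
    block = Basicblock(16, 32, stride=2)
    out = block(Variable(torch.randn(1, 16, 32, 32)))
    assert out.size() == (1, 32, 16, 16)
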
class Bottleneck(nn.Module):
    """Pre-activation residual block.

    Identity Mappings in Deep Residual Networks
    ref https://arxiv.org/abs/1603.05027
    """
    def __init__(self, inplanes, planes, stride=1, norm_layer=nn.BatchNorm2d):
        super(Bottleneck, self).__init__()
        self.expansion = 4
        if inplanes != planes * self.expansion or stride != 1:
            self.downsample = True
            self.residual_layer = nn.Conv2d(inplanes, planes * self.expansion,
                                            kernel_size=1, stride=stride)
        else:
            self.downsample = False
        conv_block = []
        conv_block += [norm_layer(inplanes),
                       nn.ReLU(inplace=True),
                       nn.Conv2d(inplanes, planes, kernel_size=1,
                                 stride=1, bias=False)]
        conv_block += [norm_layer(planes),
                       nn.ReLU(inplace=True),
                       nn.Conv2d(planes, planes, kernel_size=3,
                                 stride=stride, padding=1, bias=False)]
        conv_block += [norm_layer(planes),
                       nn.ReLU(inplace=True),
                       nn.Conv2d(planes, planes * self.expansion,
                                 kernel_size=1, stride=1, bias=False)]
        self.conv_block = nn.Sequential(*conv_block)

    def forward(self, x):
        if self.downsample:
            residual = self.residual_layer(x)
        else:
            residual = x
        return residual + self.conv_block(x)
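
# Hedged sketch (not in the original commit): the pre-activation Bottleneck
# expands its output channels by a factor of 4, so a block built with
# planes=64 emits 256 channels; the input size below is an assumption.
def _check_bottleneck():
    block = Bottleneck(64, 64, stride=1)
    out = block(Variable(torch.randn(1, 64, 8, 8)))
    assert out.size() == (1, 256, 8, 8)
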
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
class EncLayer(nn.Module):
    """Encoding-based channel attention: summarizes context through an
    Encoding layer and rescales the input with per-channel sigmoid gates."""
    def __init__(self, channel, K=16, reduction=4):
        super(EncLayer, self).__init__()
        out_channel = int(channel / reduction)
        self.fc = nn.Sequential(
            nn.Conv2d(channel, out_channel, 1),
            nn.BatchNorm2d(out_channel),
            nn.ReLU(inplace=True),
            encoding.nn.Encoding(D=out_channel, K=K),
            encoding.nn.View(-1, out_channel * K),
            encoding.nn.Normalize(),
            nn.Linear(out_channel * K, channel),
            nn.Sigmoid()
        )

    def forward(self, x):
        b, c, _, _ = x.size()
        y = self.fc(x).view(b, c, 1, 1)
        return x * y
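
# Hedged usage sketch (not in the original commit): EncLayer behaves like a
# Squeeze-and-Excitation gate whose channel weights come from this package's
# Encoding layer; the feature size below is an illustrative assumption.
def _check_enclayer():
    layer = EncLayer(64, K=16, reduction=4)
    x = Variable(torch.randn(2, 64, 8, 8))
    y = layer(x)
    assert y.size() == x.size()  # gating rescales channels, shape unchanged
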
class EncDropLayer(nn.Module):
    """Same channel gating as EncLayer, but with encoding.nn.EncodingDrop in
    place of encoding.nn.Encoding."""
    def __init__(self, channel, K=16, reduction=4):
        super(EncDropLayer, self).__init__()
        out_channel = int(channel / reduction)
        self.fc = nn.Sequential(
            nn.Conv2d(channel, out_channel, 1),
            nn.BatchNorm2d(out_channel),
            nn.ReLU(inplace=True),
            encoding.nn.EncodingDrop(D=out_channel, K=K),
            encoding.nn.View(-1, out_channel * K),
            encoding.nn.Normalize(),
            nn.Linear(out_channel * K, channel),
            nn.Sigmoid()
        )

    def forward(self, x):
        b, c, _, _ = x.size()
        y = self.fc(x).view(b, c, 1, 1)
        return x * y
class EncBasicBlock(nn.Module):
    """Post-activation basic block with encoding-based channel gating
    (self.se) applied before the residual addition."""
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, K=16, ELayer=EncLayer):
        super(EncBasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes, 1)
        self.bn2 = nn.BatchNorm2d(planes)
        self.se = ELayer(planes, K, self.expansion * 4)
        self.stride = stride
        if inplanes != planes or stride != 1:
            self.downsample = True
            self.residual_layer = nn.Conv2d(inplanes, planes,
                                            kernel_size=1, stride=stride)
        else:
            self.downsample = False

    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.se(out)
        if self.downsample:
            residual = self.residual_layer(x)
        out += residual
        out = self.relu(out)
        return out
class EncBottleneck(nn.Module):
    """Post-activation bottleneck with encoding-based channel gating
    (self.se) applied to the expanded features before the residual
    addition."""
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None, K=16,
                 ELayer=EncLayer):
        super(EncBottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * self.expansion,
                               kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.se = ELayer(planes * self.expansion, K, self.expansion * 4)
        self.stride = stride
        if inplanes != planes * self.expansion or stride != 1:
            self.downsample = True
            self.residual_layer = nn.Conv2d(inplanes, planes * self.expansion,
                                            kernel_size=1, stride=stride)
        else:
            self.downsample = False

    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        out = self.se(out)
        if self.downsample:
            residual = self.residual_layer(x)
        out += residual
        out = self.relu(out)
        return out
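
# Hedged sketch (not in the original commit): a strided EncBottleneck expands
# channels by 4x and halves the spatial size, with the gate sized by
# reduction=self.expansion*4=16; the input size below is an assumption.
def _check_encbottleneck():
    block = EncBottleneck(64, 64, stride=2, K=16)
    out = block(Variable(torch.randn(1, 64, 16, 16)))
    assert out.size() == (1, 256, 8, 8)
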
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Hang Zhang
## ECE Department, Rutgers University
## Email: zhang.hang@rutgers.edu
## Copyright (c) 2017
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import torch
import torch.nn as nn
from torch.autograd import Variable

import model.mynn as nn2
import encoding


class Net(nn.Module):
    def __init__(self, args):
        super(Net, self).__init__()
        num_blocks = [2, 2, 2]
        block = nn2.Basicblock
        if block == nn2.Basicblock:
            self.expansion = 1
        else:
            self.expansion = 4
        self.inplanes = args.widen * 16
        strides = [1, 2, 2]
        model = []
        # Conv_1
        model += [nn.Conv2d(3, self.inplanes, kernel_size=3, padding=1),
                  nn.BatchNorm2d(self.inplanes),
                  nn.ReLU(inplace=True)]
        # Residual units
        model += [self._residual_unit(block, self.inplanes, num_blocks[0],
                                      strides[0])]
        for i in range(2):
            model += [self._residual_unit(
                block, int(2 * self.inplanes / self.expansion),
                num_blocks[i + 1], strides[i + 1])]
        # Last conv layer
        model += [nn.BatchNorm2d(self.inplanes),
                  nn.ReLU(inplace=True),
                  nn.AvgPool2d(8),
                  encoding.nn.View(-1, self.inplanes),
                  nn.Linear(self.inplanes, args.nclass)]
        self.model = nn.Sequential(*model)

    def _residual_unit(self, block, planes, n_blocks, stride):
        strides = [stride] + [1] * (n_blocks - 1)
        layers = []
        for i in range(n_blocks):
            layers += [block(self.inplanes, planes, strides[i])]
            self.inplanes = self.expansion * planes
        return nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)
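
# Hedged usage sketch (not in the original commit): Net only reads args.widen
# and args.nclass, so a plain namespace suffices to build the CIFAR-sized
# model; the widen/nclass values below are illustrative assumptions.
def _build_net_example():
    from argparse import Namespace
    net = Net(Namespace(widen=1, nclass=10))
    x = Variable(torch.randn(1, 3, 32, 32))
    return net(x)  # logits of size (1, 10); AvgPool2d(8) assumes 32x32 inputs
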
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Hang Zhang
## ECE Department, Rutgers University
## Email: zhang.hang@rutgers.edu
## Copyright (c) 2017
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import os
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable

import encoding.dilated as dresnet
import torchvision.models as orgresnet
class Dilated_ResNet(nn.Module):
    def __init__(self, nclass):
        super(Dilated_ResNet, self).__init__()
        self.pretrained = dresnet.resnet50(pretrained=True)

    def forward(self, x):
        # pre-trained ResNet feature
        x = self.pretrained.conv1(x)
        x = self.pretrained.bn1(x)
        x = self.pretrained.relu(x)
        x = self.pretrained.maxpool(x)
        x = self.pretrained.layer1(x)
        x = self.pretrained.layer2(x)
        x = self.pretrained.layer3(x)
        x = self.pretrained.layer4(x)
        return x


class Org_ResNet(nn.Module):
    def __init__(self, nclass):
        super(Org_ResNet, self).__init__()
        self.pretrained = orgresnet.resnet50(pretrained=True)

    def forward(self, x):
        # pre-trained ResNet feature
        x = self.pretrained.conv1(x)
        x = self.pretrained.bn1(x)
        x = self.pretrained.relu(x)
        x = self.pretrained.maxpool(x)
        x = self.pretrained.layer1(x)
        x = self.pretrained.layer2(x)
        x = self.pretrained.layer3(x)
        x = self.pretrained.layer4(x)
        return x
def test_resnet():
    # test the models (eval mode is already set in the constructors' chain)
    model1 = Dilated_ResNet(10).eval().cuda()
    model2 = Org_ResNet(10).eval().cuda()
    x = Variable(torch.Tensor(1, 3, 224, 224).uniform_(-0.5, 0.5)).cuda()
    y1 = model1(x)
    y2 = model2(x)
    print(y1[0][1])
    print(y2[0][1])


if __name__ == "__main__":
    test_resnet()
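
# Note (not part of the original commit): encoding.dilated replaces stride
# with dilation in the deeper ResNet stages, so for a 224x224 input y1 is
# expected to keep a larger spatial map (output stride 8, i.e. 28x28) while
# the torchvision model downsamples to 7x7; the two prints above should
# therefore differ in shape as well as value.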