Commit 93ea2438 authored by Sri Krishna's avatar Sri Krishna Committed by Francisco Massa
Browse files

Update densenet.py (#368)

* Update densenet.py

added missing weight init for densenet

* Update densenet.py

following official implementation [here](https://github.com/liuzhuang13/DenseNet/blob/master/models/densenet.lua#L160)

* Update densenet.py

changed to nn.init & fixed lint
parent 080b9543
...@@ -153,6 +153,16 @@ class DenseNet(nn.Module): ...@@ -153,6 +153,16 @@ class DenseNet(nn.Module):
# Linear layer # Linear layer
self.classifier = nn.Linear(num_features, num_classes) self.classifier = nn.Linear(num_features, num_classes)
# Official init from torch repo.
# Initializes all sub-modules after the full feature hierarchy and the
# classifier have been built, following the reference Lua implementation:
# He/Kaiming-normal for conv weights, BN scale=1 / shift=0, and a zeroed
# classifier bias (Linear weight keeps PyTorch's default init).
for m in self.modules():
    if isinstance(m, nn.Conv2d):
        # kaiming_normal_ is the supported in-place API; the un-suffixed
        # kaiming_normal (acting on .data) is deprecated/removed upstream.
        nn.init.kaiming_normal_(m.weight)
    elif isinstance(m, nn.BatchNorm2d):
        nn.init.constant_(m.weight, 1)
        nn.init.constant_(m.bias, 0)
    elif isinstance(m, nn.Linear):
        nn.init.constant_(m.bias, 0)
def forward(self, x): def forward(self, x):
features = self.features(x) features = self.features(x)
out = F.relu(features, inplace=True) out = F.relu(features, inplace=True)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment