OpenDAS / MMCV · Commits

Commit 3333bab6
authored Oct 10, 2018 by Kai Chen

use weight init methods

parent 62438226
Showing 4 changed files with 17 additions and 14 deletions (+17 −14)
mmcv/cnn/__init__.py     +4 −2
mmcv/cnn/resnet.py       +3 −4
mmcv/cnn/vgg.py          +4 −8
mmcv/cnn/weight_init.py  +6 −0
mmcv/cnn/__init__.py

 from .alexnet import AlexNet
 from .vgg import VGG, make_vgg_layer
 from .resnet import ResNet, make_res_layer
-from .weight_init import xavier_init, normal_init, uniform_init, kaiming_init
+from .weight_init import (constant_init, xavier_init, normal_init,
+                          uniform_init, kaiming_init)
 
 __all__ = [
     'AlexNet', 'VGG', 'make_vgg_layer', 'ResNet', 'make_res_layer',
-    'xavier_init', 'normal_init', 'uniform_init', 'kaiming_init'
+    'constant_init', 'xavier_init', 'normal_init', 'uniform_init',
+    'kaiming_init'
 ]
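For downstream users, the net effect is that all five helpers are now importable from mmcv.cnn. A minimal usage sketch (not part of the commit; the toy model below is arbitrary, and the helper calls mirror the ones that appear in the resnet.py and vgg.py hunks of this commit):

import torch.nn as nn
from mmcv.cnn import constant_init, kaiming_init, normal_init

# An arbitrary toy model, used only to demonstrate the init helpers.
model = nn.Sequential(
    nn.Conv2d(3, 16, 3, padding=1),
    nn.BatchNorm2d(16),
    nn.ReLU(inplace=True),
    nn.Linear(16, 10),
)

for m in model.modules():
    if isinstance(m, nn.Conv2d):
        kaiming_init(m)           # He init, as in ResNet/VGG below
    elif isinstance(m, nn.BatchNorm2d):
        constant_init(m, 1)       # weight=1, bias defaults to 0
    elif isinstance(m, nn.Linear):
        normal_init(m, std=0.01)  # small Gaussian head, as in VGG below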
mmcv/cnn/resnet.py

@@ -4,6 +4,7 @@ import math
 import torch.nn as nn
 import torch.utils.checkpoint as cp
+from .weight_init import constant_init, kaiming_init
 from ..runner import load_checkpoint
 ...
@@ -268,11 +269,9 @@ class ResNet(nn.Module):
         elif pretrained is None:
             for m in self.modules():
                 if isinstance(m, nn.Conv2d):
-                    n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
-                    nn.init.normal_(m.weight, 0, math.sqrt(2. / n))
+                    kaiming_init(m)
                 elif isinstance(m, nn.BatchNorm2d):
-                    nn.init.constant_(m.weight, 1)
-                    nn.init.constant_(m.bias, 0)
+                    constant_init(m, 1)
         else:
             raise TypeError('pretrained must be a str or None')
 ...
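The replacement in ResNet.init_weights is behavior-preserving under the usual He-initialization defaults: for a Conv2d, fan_out = kernel_size[0] * kernel_size[1] * out_channels, and Kaiming-normal init with nonlinearity='relu' draws weights from N(0, 2 / fan_out), which is exactly the std = sqrt(2. / n) that the deleted lines computed by hand. A quick check sketch (assuming kaiming_init forwards to nn.init.kaiming_normal_ with mode='fan_out' and nonlinearity='relu'; those defaults are not visible in this diff):

import math
import torch.nn as nn

m = nn.Conv2d(64, 128, kernel_size=3, padding=1)

# std used by the deleted manual initialization
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
manual_std = math.sqrt(2. / n)

# empirical std after He-normal init with fan_out / relu
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
print(manual_std, m.weight.std().item())  # the two values agree closely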
mmcv/cnn/vgg.py

@@ -2,6 +2,7 @@ import logging
 import torch.nn as nn
+from .weight_init import constant_init, normal_init, kaiming_init
 from ..runner import load_checkpoint
 ...
@@ -112,16 +113,11 @@ class VGG(nn.Module):
         elif pretrained is None:
             for m in self.modules():
                 if isinstance(m, nn.Conv2d):
-                    nn.init.kaiming_normal_(
-                        m.weight, mode='fan_out', nonlinearity='relu')
-                    if m.bias is not None:
-                        nn.init.constant_(m.bias, 0)
+                    kaiming_init(m)
                 elif isinstance(m, nn.BatchNorm2d):
-                    nn.init.constant_(m.weight, 1)
-                    nn.init.constant_(m.bias, 0)
+                    constant_init(m, 1)
                 elif isinstance(m, nn.Linear):
-                    nn.init.normal_(m.weight, 0, 0.01)
-                    nn.init.constant_(m.bias, 0)
+                    normal_init(m, std=0.01)
         else:
             raise TypeError('pretrained must be a str or None')
 ...
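Either way, callers interact with these changes only through init_weights. A usage sketch (the constructor argument is an assumption about the public mmcv.cnn API, not shown in this diff; the pretrained handling follows the hunk above):

from mmcv.cnn import VGG

model = VGG(16)                      # assumed signature: depth as first argument
model.init_weights(pretrained=None)  # random init via the helpers shown above
# model.init_weights(pretrained='/path/to/vgg16.pth')
# A str is forwarded to load_checkpoint; anything other than str/None raises
# the TypeError shown in the hunk above.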
mmcv/cnn/weight_init.py

 import torch.nn as nn
 
 
+def constant_init(module, val, bias=0):
+    nn.init.constant_(module.weight, val)
+    if hasattr(module, 'bias'):
+        nn.init.constant_(module.bias, bias)
+
+
 def xavier_init(module, gain=1, bias=0, distribution='normal'):
     assert distribution in ['uniform', 'normal']
     if distribution == 'uniform':
 ...
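The diff truncates weight_init.py after the first lines of xavier_init, so only constant_init is shown in full. For orientation, here is a sketch of how the remaining helpers plausibly look, inferred from the constant_init pattern above and from the call sites kaiming_init(m) and normal_init(m, std=0.01) in resnet.py and vgg.py; the actual defaults may differ:

# Inferred sketch, not the file contents from this commit.
import torch.nn as nn

def normal_init(module, mean=0, std=1, bias=0):
    nn.init.normal_(module.weight, mean, std)
    if hasattr(module, 'bias'):
        nn.init.constant_(module.bias, bias)

def uniform_init(module, a=0, b=1, bias=0):
    nn.init.uniform_(module.weight, a, b)
    if hasattr(module, 'bias'):
        nn.init.constant_(module.bias, bias)

def kaiming_init(module, mode='fan_out', nonlinearity='relu', bias=0,
                 distribution='normal'):
    assert distribution in ['uniform', 'normal']
    if distribution == 'uniform':
        nn.init.kaiming_uniform_(
            module.weight, mode=mode, nonlinearity=nonlinearity)
    else:
        nn.init.kaiming_normal_(
            module.weight, mode=mode, nonlinearity=nonlinearity)
    if hasattr(module, 'bias'):
        nn.init.constant_(module.bias, bias)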