Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
MMCV
Commits
fa84b264
Unverified
Commit
fa84b264
authored
Mar 17, 2020
by
Jerry Jiarui XU
Committed by
GitHub
Mar 17, 2020
Browse files
add bias_init_with_prob (#213)
* add bias_init_with_prob * add test_weight_init
parent
12e5913b
Changes
3
Show whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
88 additions
and
3 deletions
+88
-3
mmcv/cnn/__init__.py
mmcv/cnn/__init__.py
+4
-3
mmcv/cnn/weight_init.py
mmcv/cnn/weight_init.py
+7
-0
tests/test_weight_init.py
tests/test_weight_init.py
+77
-0
No files found.
mmcv/cnn/__init__.py
View file @
fa84b264
...
...
# Public API of mmcv.cnn: model backbones and weight-initialization helpers.
from .alexnet import AlexNet
from .resnet import ResNet, make_res_layer
from .vgg import VGG, make_vgg_layer
from .weight_init import (bias_init_with_prob, caffe2_xavier_init,
                          constant_init, kaiming_init, normal_init,
                          uniform_init, xavier_init)

# NOTE: every name imported above must appear here; the pre-change version
# of this list had a missing comma ('caffe2_xavier_init' 'kaiming_init')
# which silently concatenated the two strings and broke `from mmcv.cnn import *`.
__all__ = [
    'AlexNet', 'VGG', 'make_vgg_layer', 'ResNet', 'make_res_layer',
    'constant_init', 'xavier_init', 'normal_init', 'uniform_init',
    'kaiming_init', 'caffe2_xavier_init', 'bias_init_with_prob'
]
mmcv/cnn/weight_init.py
View file @
fa84b264
# Copyright (c) Open-MMLab. All rights reserved.
import
numpy
as
np
import
torch.nn
as
nn
...
...
@@ -57,3 +58,9 @@ def caffe2_xavier_init(module, bias=0):
mode
=
'fan_in'
,
nonlinearity
=
'leaky_relu'
,
distribution
=
'uniform'
)
def bias_init_with_prob(prior_prob):
    """Initialize conv/fc bias value according to a given probability.

    Computes ``-log((1 - p) / p)``, i.e. the inverse sigmoid of
    ``prior_prob``: a layer whose bias is set to this value outputs
    ``prior_prob`` after a sigmoid when its weights contribute ~0.
    Commonly used for the classification head of detectors (e.g. focal
    loss initialization).

    Args:
        prior_prob (float): Desired prior probability, must be in (0, 1).

    Returns:
        float: The bias value to initialize with.
    """
    return float(-np.log((1 - prior_prob) / prior_prob))
tests/test_weight_init.py
0 → 100644
View file @
fa84b264
# Copyright (c) Open-MMLab. All rights reserved.
import
numpy
as
np
import
pytest
import
torch
from
torch
import
nn
from
mmcv.cnn
import
(
bias_init_with_prob
,
caffe2_xavier_init
,
constant_init
,
kaiming_init
,
normal_init
,
uniform_init
,
xavier_init
)
def test_constant_init():
    """constant_init fills weights with the given value and zeroes the bias."""
    conv_module = nn.Conv2d(3, 16, 3)
    constant_init(conv_module, 0.1)
    assert conv_module.weight.allclose(
        torch.full_like(conv_module.weight, 0.1))
    assert conv_module.bias.allclose(torch.zeros_like(conv_module.bias))
    # Bias-less modules must be handled without raising.
    conv_module_no_bias = nn.Conv2d(3, 16, 3, bias=False)
    constant_init(conv_module_no_bias, 0.1)
    # BUG FIX: the original asserted on `conv_module` (already checked above),
    # leaving the no-bias module's weights unverified.
    assert conv_module_no_bias.weight.allclose(
        torch.full_like(conv_module_no_bias.weight, 0.1))
def test_xavier_init():
    """xavier_init sets the bias and rejects unknown distributions."""
    conv_module = nn.Conv2d(3, 16, 3)
    xavier_init(conv_module, bias=0.1)
    expected_bias = torch.full_like(conv_module.bias, 0.1)
    assert conv_module.bias.allclose(expected_bias)
    # The 'uniform' distribution is a supported alternative to 'normal'.
    xavier_init(conv_module, distribution='uniform')
    # TODO: sanity check of weight distribution, e.g. mean, std
    # Anything outside {'uniform', 'normal'} must be rejected.
    with pytest.raises(AssertionError):
        xavier_init(conv_module, distribution='student-t')
    # A module without a bias must still initialize cleanly.
    conv_module_no_bias = nn.Conv2d(3, 16, 3, bias=False)
    xavier_init(conv_module_no_bias)
def test_normal_init():
    """normal_init fills the bias with the requested constant."""
    conv_module = nn.Conv2d(3, 16, 3)
    normal_init(conv_module, bias=0.1)
    # TODO: sanity check of weight distribution, e.g. mean, std
    expected_bias = torch.full_like(conv_module.bias, 0.1)
    assert conv_module.bias.allclose(expected_bias)
    # A bias-less module must initialize without raising.
    conv_module_no_bias = nn.Conv2d(3, 16, 3, bias=False)
    normal_init(conv_module_no_bias)
    # TODO: sanity check distribution, e.g. mean, std
def test_uniform_init():
    """uniform_init fills the bias with the requested constant."""
    conv_module = nn.Conv2d(3, 16, 3)
    uniform_init(conv_module, bias=0.1)
    # TODO: sanity check of weight distribution, e.g. mean, std
    expected_bias = torch.full_like(conv_module.bias, 0.1)
    assert conv_module.bias.allclose(expected_bias)
    # A bias-less module must initialize without raising.
    conv_module_no_bias = nn.Conv2d(3, 16, 3, bias=False)
    uniform_init(conv_module_no_bias)
def test_kaiming_init():
    """kaiming_init sets the bias and rejects unknown distributions."""
    conv_module = nn.Conv2d(3, 16, 3)
    kaiming_init(conv_module, bias=0.1)
    # TODO: sanity check of weight distribution, e.g. mean, std
    expected_bias = torch.full_like(conv_module.bias, 0.1)
    assert conv_module.bias.allclose(expected_bias)
    # 'uniform' is the supported alternative to the default 'normal'.
    kaiming_init(conv_module, distribution='uniform')
    # Anything outside {'uniform', 'normal'} must be rejected.
    with pytest.raises(AssertionError):
        kaiming_init(conv_module, distribution='student-t')
    # A bias-less module must initialize without raising.
    conv_module_no_bias = nn.Conv2d(3, 16, 3, bias=False)
    kaiming_init(conv_module_no_bias)
def test_caffe_xavier_init():
    """caffe2_xavier_init runs on a standard conv module without raising."""
    module = nn.Conv2d(3, 16, 3)
    caffe2_xavier_init(module)
def test_bias_init_with_prob():
    """bias_init_with_prob yields a bias matching -log((1-p)/p)."""
    conv_module = nn.Conv2d(3, 16, 3)
    prior_prob = 0.1
    # FIX: pass `prior_prob` instead of a duplicated literal 0.1, so the
    # test stays consistent if the probability under test is changed.
    normal_init(conv_module, bias=bias_init_with_prob(prior_prob))
    # TODO: sanity check of weight distribution, e.g. mean, std
    bias = float(-np.log((1 - prior_prob) / prior_prob))
    assert conv_module.bias.allclose(torch.full_like(conv_module.bias, bias))
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment