Unverified commit 47825b19 authored by Miao Zheng, committed by GitHub

[Refactoring] Revise init_weight in BaseModule (#905)

* [Refactoring] Add deprecated API warning

* revise test

* fix lint

* fix lint
parent 89efc607
@@ -42,9 +42,9 @@ class BaseModule(nn.Module, metaclass=ABCMeta):
         if not self._is_init:
             if hasattr(self, 'init_cfg'):
                 initialize(self, self.init_cfg)
-            for module in self.children():
-                if 'init_weight' in dir(module):
-                    module.init_weight()
+            for m in self.children():
+                if hasattr(m, 'init_weight'):
+                    m.init_weight()
             self._is_init = True
         else:
             warnings.warn(f'init_weight of {self.__class__.__name__} has '
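The change swaps 'init_weight' in dir(module) for hasattr(m, 'init_weight'), which short-circuits on the first attribute lookup instead of materializing the whole attribute list, and keeps the rest of the cascade intact. Below is a minimal, self-contained sketch of that cascade pattern; the Parent and Child classes are hypothetical stand-ins, not code from mmcv.

import warnings

import torch.nn as nn


class Child(nn.Module):
    """Toy child exposing the duck-typed init_weight hook."""

    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 3, 1)

    def init_weight(self):
        nn.init.constant_(self.conv.weight, 1.)


class Parent(nn.Module):
    """Cascades initialization to every child that provides the hook."""

    def __init__(self):
        super().__init__()
        self._is_init = False
        self.child = Child()
        self.pool = nn.MaxPool2d(2)  # no init_weight hook; safely skipped

    def init_weight(self):
        if not self._is_init:
            for m in self.children():
                # hasattr short-circuits; 'init_weight' in dir(m) would
                # build and scan the full attribute list on every check.
                if hasattr(m, 'init_weight'):
                    m.init_weight()
            self._is_init = True
        else:
            warnings.warn(f'init_weight of {self.__class__.__name__} has '
                          'been called more than once')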
@@ -336,10 +336,12 @@ def test_sequential_model_weight_init():
     assert torch.equal(seq_model[1].conv2d.bias,
                        torch.full(seq_model[1].conv2d.bias.shape, 3.))
     # inner init_cfg has higher priority
+    layers = [build_from_cfg(cfg, COMPONENTS) for cfg in seq_model_cfg]
     seq_model = Sequential(
         *layers,
         init_cfg=dict(
             type='Constant', layer=['Conv1d', 'Conv2d'], val=4., bias=5.))
     seq_model.init_weight()
     assert torch.equal(seq_model[0].conv1d.weight,
                        torch.full(seq_model[0].conv1d.weight.shape, 0.))
     assert torch.equal(seq_model[0].conv1d.bias,
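The hunk above rebuilds layers from the configs so the priority check starts from freshly constructed components, then asserts that the per-component (inner) init_cfg wins over the outer Sequential-level one: per the BaseModule change shown earlier, initialize(self, self.init_cfg) runs first and each child's own init_weight() overwrites it afterwards. A condensed sketch of that rule follows, assuming mmcv's BaseModule and Sequential; FooConv1d and its default cfg are illustrative stand-ins for the registered test components.

import torch
import torch.nn as nn
from mmcv.runner import BaseModule, Sequential


class FooConv1d(BaseModule):
    """Stand-in component carrying its own (inner) init_cfg."""

    def __init__(self,
                 init_cfg=dict(
                     type='Constant', layer='Conv1d', val=0., bias=1.)):
        super().__init__(init_cfg)
        self.conv1d = nn.Conv1d(4, 1, 4)


seq = Sequential(
    FooConv1d(),
    init_cfg=dict(type='Constant', layer=['Conv1d'], val=4., bias=5.))
seq.init_weight()
# The outer cfg (val=4.) is applied first; the inner cfg then overwrites
# it, so the child keeps its own constant init of 0.
assert torch.equal(seq[0].conv1d.weight,
                   torch.full(seq[0].conv1d.weight.shape, 0.))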
@@ -371,8 +373,12 @@ def test_modulelist_weight_init():
     assert torch.equal(modellist[1].conv2d.bias,
                        torch.full(modellist[1].conv2d.bias.shape, 3.))
     # inner init_cfg has higher priority
+    layers = [build_from_cfg(cfg, COMPONENTS) for cfg in models_cfg]
     modellist = ModuleList(
-        layers, init_cfg=dict(type='Constant', val=4., bias=5.))
+        layers,
+        init_cfg=dict(
+            type='Constant', layer=['Conv1d', 'Conv2d'], val=4., bias=5.))
     modellist.init_weight()
     assert torch.equal(modellist[0].conv1d.weight,
                        torch.full(modellist[0].conv1d.weight.shape, 0.))
     assert torch.equal(modellist[0].conv1d.bias,
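The same inner-over-outer rule holds for ModuleList, which takes an iterable of modules rather than unpacked arguments. Continuing the hypothetical sketch above (reuses FooConv1d and torch from it):

from mmcv.runner import ModuleList

mlist = ModuleList(
    [FooConv1d()],
    init_cfg=dict(type='Constant', layer=['Conv1d'], val=4., bias=5.))
mlist.init_weight()
# The component's own init_cfg again takes priority over the outer one.
assert torch.equal(mlist[0].conv1d.weight,
                   torch.full(mlist[0].conv1d.weight.shape, 0.))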