# The experiments reported in the paper used the default initialization of bn.weight (all set to 1), but changing the initialization may be useful in some cases.
ifsingle_init:
# Initialize the bn.weight of dbb_origin to 1 and the others to 0. This is not the default setting.
msg="The function 'replace_sub()' is deprecated, please use 'upgrade_sublayer()' instead."
raiseDeprecationWarning(msg)
defupgrade_sublayer(
self,
layer_name_pattern:Union[str,List[str]],
handle_func:Callable[[nn.Module,str],nn.Module],
)->Dict[str,nn.Module]:
"""use 'handle_func' to modify the sub-layer(s) specified by 'layer_name_pattern'.
Args:
layer_name_pattern (Union[str, List[str]]): The name of layer to be modified by 'handle_func'.
handle_func (Callable[[nn.Module, str], nn.Module]): The function to modify target layer specified by 'layer_name_pattern'. The formal params are the layer(nn.Module) and pattern(str) that is (a member of) layer_name_pattern (when layer_name_pattern is List type). And the return is the layer processed.
Returns:
Dict[str, nn.Module]: The key is the pattern and corresponding value is the result returned by 'handle_func()'.
Examples:
from paddle import nn
import paddleclas
def rep_func(layer: nn.Module, pattern: str):
new_layer = nn.Conv2d(
in_channels=layer._in_channels,
out_channels=layer._out_channels,
kernel_size=5,
padding=2
)
return new_layer
net = paddleclas.MobileNetV1()
res = net.upgrade_sublayer(layer_name_pattern=["blocks[11].depthwise_conv.conv", "blocks[12].depthwise_conv.conv"], handle_func=rep_func)
print(res)
# {'blocks[11].depthwise_conv.conv': the corresponding new_layer, 'blocks[12].depthwise_conv.conv': the corresponding new_layer}
"""
ifnotisinstance(layer_name_pattern,list):
layer_name_pattern=[layer_name_pattern]
hit_layer_pattern_list=[]
forpatterninlayer_name_pattern:
# parse pattern to find target layer and its parent