Unverified commit cb0ee1e3, authored by ZhangShilong, committed by GitHub

Remove redundant self.init_cfg (#959)

parent ae411509
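
Background on the change: all three classes inherit from BaseModule, whose own __init__ already stores init_cfg on the instance, so re-assigning it in each subclass constructor was a no-op. A minimal sketch of the pattern (simplified stand-ins, not the actual mmcv source; the real parent class is mmcv.runner.BaseModule, which also drives weight initialization from init_cfg):

# Sketch only: a simplified BaseModule illustrating why the deleted
# assignments were redundant.
import torch.nn as nn


class BaseModule(nn.Module):
    """Stand-in for mmcv.runner.BaseModule: it already stores init_cfg."""

    def __init__(self, init_cfg=None):
        super().__init__()
        self.init_cfg = init_cfg  # the one place the attribute is set


class MultiheadAttention(BaseModule):
    """Subclass after this commit: init_cfg is only forwarded to super()."""

    def __init__(self, embed_dims, num_heads, dropout=0., init_cfg=None):
        super().__init__(init_cfg)  # BaseModule records init_cfg
        self.attn = nn.MultiheadAttention(embed_dims, num_heads, dropout)
        self.dropout = nn.Dropout(dropout)
        # `self.init_cfg = init_cfg` is gone: it merely re-assigned the
        # attribute that BaseModule.__init__ had already set


attn = MultiheadAttention(embed_dims=256, num_heads=8)
print(attn.init_cfg)  # None, bookkeeping inherited from BaseModule
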
@@ -61,7 +61,6 @@ class MultiheadAttention(BaseModule):
         self.attn = nn.MultiheadAttention(embed_dims, num_heads, dropout,
                                           **kwargs)
         self.dropout = nn.Dropout(dropout)
-        self.init_cfg = init_cfg
 
     def forward(self,
                 query,
@@ -171,7 +170,6 @@ class FFN(BaseModule):
         self.num_fcs = num_fcs
         self.act_cfg = act_cfg
         self.dropout = dropout
-        self.init_cfg = init_cfg
         self.activate = build_activation_layer(act_cfg)
 
         layers = []
@@ -422,7 +420,6 @@ class TransformerLayerSequence(BaseModule):
         else:
             assert isinstance(transformerlayers, list) and \
                 len(transformerlayers) == num_layers
-        self.init_cfg = init_cfg
         self.num_layers = num_layers
         operation_order = transformerlayers[0]['operation_order']
         self.pre_norm = operation_order[0] == 'norm'