Unverified Commit 6e818309 authored by Kei-Chi Tse's avatar Kei-Chi Tse Committed by GitHub
Browse files

[Fix] Fix optimizer 'ValueError' when using PReLU activation (#2444)

* fix optimizer ValueError when using PReLU activation.

* fix lint error
parent 2478cb6f
...@@ -588,24 +588,21 @@ class FFN(BaseModule): ...@@ -588,24 +588,21 @@ class FFN(BaseModule):
ffn_drop=0., ffn_drop=0.,
dropout_layer=None, dropout_layer=None,
add_identity=True, add_identity=True,
init_cfg=None, init_cfg=None):
**kwargs):
super().__init__(init_cfg) super().__init__(init_cfg)
assert num_fcs >= 2, 'num_fcs should be no less ' \ assert num_fcs >= 2, 'num_fcs should be no less ' \
f'than 2. got {num_fcs}.' f'than 2. got {num_fcs}.'
self.embed_dims = embed_dims self.embed_dims = embed_dims
self.feedforward_channels = feedforward_channels self.feedforward_channels = feedforward_channels
self.num_fcs = num_fcs self.num_fcs = num_fcs
self.act_cfg = act_cfg
self.activate = build_activation_layer(act_cfg)
layers = [] layers = []
in_channels = embed_dims in_channels = embed_dims
for _ in range(num_fcs - 1): for _ in range(num_fcs - 1):
layers.append( layers.append(
Sequential( Sequential(
Linear(in_channels, feedforward_channels), self.activate, Linear(in_channels, feedforward_channels),
nn.Dropout(ffn_drop))) build_activation_layer(act_cfg), nn.Dropout(ffn_drop)))
in_channels = feedforward_channels in_channels = feedforward_channels
layers.append(Linear(feedforward_channels, embed_dims)) layers.append(Linear(feedforward_channels, embed_dims))
layers.append(nn.Dropout(ffn_drop)) layers.append(nn.Dropout(ffn_drop))
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment