Unverified commit 6e818309, authored by Kei-Chi Tse, committed by GitHub
Browse files

[Fix] Fix optimizer 'ValueError' when using PReLU activation (#2444)

* fix optimizer ValueError when using PReLU activation.

* fix lint error
parent 2478cb6f
......@@ -588,24 +588,21 @@ class FFN(BaseModule):
ffn_drop=0.,
dropout_layer=None,
add_identity=True,
init_cfg=None,
**kwargs):
init_cfg=None):
super().__init__(init_cfg)
assert num_fcs >= 2, 'num_fcs should be no less ' \
f'than 2. got {num_fcs}.'
self.embed_dims = embed_dims
self.feedforward_channels = feedforward_channels
self.num_fcs = num_fcs
self.act_cfg = act_cfg
self.activate = build_activation_layer(act_cfg)
layers = []
in_channels = embed_dims
for _ in range(num_fcs - 1):
layers.append(
Sequential(
Linear(in_channels, feedforward_channels), self.activate,
nn.Dropout(ffn_drop)))
Linear(in_channels, feedforward_channels),
build_activation_layer(act_cfg), nn.Dropout(ffn_drop)))
in_channels = feedforward_channels
layers.append(Linear(feedforward_channels, embed_dims))
layers.append(nn.Dropout(ffn_drop))
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment