Commit aec35fd6 authored by Darius Lam

Fixed cross-attention TypeError

parent ce26d3d7
@@ -341,6 +341,7 @@ class MHA(nn.Module):
                 self.dwconv_qkv = nn.Conv1d(3 * embed_dim, 3 * embed_dim, kernel_size=3, padding=2,
                                             groups=3 * embed_dim)
         else:
+            inner_attn_cls = inner_cross_attn_cls
             self.Wq = linear_cls(embed_dim, embed_dim, bias=bias, **factory_kwargs)
             if not self.return_residual:
                 self.Wkv = linear_cls(embed_dim, 2 * embed_dim, bias=bias, **factory_kwargs)
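For context, here is a minimal, runnable sketch of why the added assignment matters, assuming the usual flash-attention-style MHA layout: the self-attention implementation consumes a single packed `qkv` tensor, while the cross-attention implementation takes separate `q` and `kv` tensors, so instantiating the wrong class produces a call-signature mismatch (the TypeError named in the commit message). `ToyMHA`, `SelfAttention`, and `CrossAttention` below are illustrative stand-ins, not the repository's actual classes.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F


class SelfAttention(nn.Module):
    """Stand-in for inner_attn_cls: forward() takes one packed qkv tensor."""
    def forward(self, qkv):
        # qkv: (batch, seqlen, 3, nheads, headdim)
        q, k, v = qkv.unbind(dim=2)
        out = F.scaled_dot_product_attention(
            q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)
        )
        return out.transpose(1, 2)


class CrossAttention(nn.Module):
    """Stand-in for inner_cross_attn_cls: forward() takes separate q and packed kv."""
    def forward(self, q, kv):
        # q: (batch, seqlen_q, nheads, headdim); kv: (batch, seqlen_k, 2, nheads, headdim)
        k, v = kv.unbind(dim=2)
        out = F.scaled_dot_product_attention(
            q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)
        )
        return out.transpose(1, 2)


class ToyMHA(nn.Module):
    """Condensed illustration of the branch being patched (not the real MHA)."""
    def __init__(self, embed_dim, num_heads, cross_attn=False):
        super().__init__()
        self.cross_attn = cross_attn
        self.num_heads = num_heads
        self.head_dim = embed_dim // num_heads
        inner_attn_cls = SelfAttention
        inner_cross_attn_cls = CrossAttention
        if not self.cross_attn:
            self.Wqkv = nn.Linear(embed_dim, 3 * embed_dim)
        else:
            # The added line: pick the class whose forward() expects (q, kv),
            # so the call in forward() below matches its signature.
            inner_attn_cls = inner_cross_attn_cls
            self.Wq = nn.Linear(embed_dim, embed_dim)
            self.Wkv = nn.Linear(embed_dim, 2 * embed_dim)
        self.inner_attn = inner_attn_cls()
        self.out_proj = nn.Linear(embed_dim, embed_dim)

    def forward(self, x, x_kv=None):
        b = x.shape[0]
        if not self.cross_attn:
            qkv = self.Wqkv(x).reshape(b, -1, 3, self.num_heads, self.head_dim)
            ctx = self.inner_attn(qkv)
        else:
            q = self.Wq(x).reshape(b, -1, self.num_heads, self.head_dim)
            kv = self.Wkv(x_kv).reshape(b, -1, 2, self.num_heads, self.head_dim)
            # Without the fix, self.inner_attn would be a SelfAttention instance
            # and this two-argument call would raise a TypeError.
            ctx = self.inner_attn(q, kv)
        return self.out_proj(ctx.reshape(b, -1, self.num_heads * self.head_dim))


if __name__ == "__main__":
    mha = ToyMHA(embed_dim=64, num_heads=8, cross_attn=True)
    out = mha(torch.randn(2, 10, 64), torch.randn(2, 20, 64))
    print(out.shape)  # torch.Size([2, 10, 64])
```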