Unverified commit 641e5f3f, authored by Suraj Patil, committed by GitHub

Fix XGLM cross attention (#16290)

parent f3938680
@@ -399,7 +399,7 @@ class XGLMDecoderLayer(nn.Module):
         self.activation_dropout = config.activation_dropout
         if config.add_cross_attention:
-            self.crossattention = XGLMAttention(
+            self.encoder_attn = XGLMAttention(
                 embed_dim=self.embed_dim,
                 num_heads=config.attention_heads,
                 dropout=config.attention_dropout,
...
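For context, the one-line change renames the cross-attention submodule so that the attribute assigned in `__init__` matches the name the rest of the decoder layer uses (`self.encoder_attn`). Below is a minimal, hypothetical sketch of this bug class, using `nn.MultiheadAttention` as a stand-in for `XGLMAttention`; the class names and shapes are illustrative, not the actual Hugging Face implementation.

```python
# Sketch (not the real XGLM code) of the failure mode this commit fixes:
# a module is registered under one attribute name in __init__ but looked
# up under another name in forward(), so cross attention breaks at call
# time instead of at construction time.

import torch
import torch.nn as nn


class BuggyDecoderLayer(nn.Module):
    def __init__(self, embed_dim: int, num_heads: int):
        super().__init__()
        # Bug: the module is assigned as `crossattention` ...
        self.crossattention = nn.MultiheadAttention(embed_dim, num_heads)

    def forward(self, hidden_states, encoder_hidden_states):
        # ... but forward() references `encoder_attn` -> AttributeError.
        attn_output, _ = self.encoder_attn(
            hidden_states, encoder_hidden_states, encoder_hidden_states
        )
        return attn_output


class FixedDecoderLayer(nn.Module):
    def __init__(self, embed_dim: int, num_heads: int):
        super().__init__()
        # Fix: the attribute name matches what forward() uses.
        self.encoder_attn = nn.MultiheadAttention(embed_dim, num_heads)

    def forward(self, hidden_states, encoder_hidden_states):
        attn_output, _ = self.encoder_attn(
            hidden_states, encoder_hidden_states, encoder_hidden_states
        )
        return attn_output


if __name__ == "__main__":
    x = torch.randn(5, 2, 16)    # (seq_len, batch, embed_dim)
    enc = torch.randn(7, 2, 16)  # encoder hidden states
    layer = FixedDecoderLayer(embed_dim=16, num_heads=4)
    print(layer(x, enc).shape)   # torch.Size([5, 2, 16])
```

A rename like this also matters for checkpoint loading: parameter names in saved state dicts include the attribute path, so the attribute chosen in `__init__` has to agree with the name used everywhere else in the codebase.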