Unverified commit 503e8c8b authored by othertea, committed by GitHub
Browse files

fix ValueError message in LlamaAttention (#22966)

parent 6e329593
@@ -216,7 +216,7 @@ class LlamaAttention(nn.Module):
         if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len):
             raise ValueError(
-                f"Attention weights should be of size {(bsz * self.num_heads, q_len, kv_seq_len)}, but is"
+                f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is"
                 f" {attn_weights.size()}"
             )
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.