Unverified Commit a4e1a1d0 authored by amyeroberts's avatar amyeroberts Committed by GitHub
Browse files

🚨 FLAVA: Remove double softmax (#31322)

Remove double softmax
parent 8fff07de
......@@ -472,8 +472,6 @@ class FlavaSelfAttention(nn.Module):
# Normalize the attention scores to probabilities.
attention_probs = nn.functional.softmax(attention_scores, dim=-1)
# Normalize the attention scores to probabilities.
attention_probs = nn.functional.softmax(attention_scores, dim=-1)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
......
......@@ -1285,7 +1285,7 @@ class FlavaModelIntegrationTest(unittest.TestCase):
# verify the embeddings
self.assertAlmostEqual(outputs.image_embeddings.sum().item(), -1352.53540, places=4)
self.assertAlmostEqual(outputs.text_embeddings.sum().item(), -198.98225, places=4)
self.assertAlmostEqual(outputs.multimodal_embeddings.sum().item(), -4030.4602050, places=4)
self.assertAlmostEqual(outputs.multimodal_embeddings.sum().item(), -4030.4604492, places=4)
@require_vision
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment