Unverified Commit 458b0cd2 authored by Sai-Suraj-27, committed by GitHub

fix: Updated `test_embeded_special_tokens` for luke and mluke models (#32413)

Fixed the tokenizer tests for the luke and mluke models: `test_embeded_special_tokens` contained a duplicated `token_type_ids` assertion, which is removed from both tests.
parent baf7e5c9
@@ -146,11 +146,9 @@ class LukeTokenizerTest(TokenizerTesterMixin, unittest.TestCase):
         # token_type_ids should put 0 everywhere
         self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))
-        # token_type_ids should put 0 everywhere
-        self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))
         # attention_mask should put 1 everywhere, so sum over length should be 1
         self.assertEqual(
             sum(tokens_r["attention_mask"]) / len(tokens_r["attention_mask"]),
             sum(tokens_p["attention_mask"]) / len(tokens_p["attention_mask"]),
         )
@@ -109,11 +109,9 @@ class MLukeTokenizerTest(TokenizerTesterMixin, unittest.TestCase):
         # token_type_ids should put 0 everywhere
         self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))
-        # token_type_ids should put 0 everywhere
-        self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))
         # attention_mask should put 1 everywhere, so sum over length should be 1
         self.assertEqual(
             sum(tokens_r["attention_mask"]) / len(tokens_r["attention_mask"]),
             sum(tokens_p["attention_mask"]) / len(tokens_p["attention_mask"]),
         )
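For context, the surviving assertions compare the outputs of the slow (`tokens_p`, pure-Python) and fast (`tokens_r`, Rust-backed) tokenizers on the same input. Below is a minimal standalone sketch of that pattern, not the repository's exact test: it uses `roberta-base` (whose vocabulary LUKE builds on) because a slow/fast tokenizer pair is readily available for it, and the example sentence is illustrative.

```python
from transformers import RobertaTokenizer, RobertaTokenizerFast

# Slow (Python) and fast (Rust) tokenizers for the same checkpoint.
slow = RobertaTokenizer.from_pretrained("roberta-base")
fast = RobertaTokenizerFast.from_pretrained("roberta-base")

text = "A, <mask> AllenNLP sentence."
tokens_p = slow.encode_plus(text, add_special_tokens=True, return_token_type_ids=True)
tokens_r = fast.encode_plus(text, add_special_tokens=True, return_token_type_ids=True)

# token_type_ids should be 0 everywhere, so the sums must match (both 0).
assert sum(tokens_r["token_type_ids"]) == sum(tokens_p["token_type_ids"])

# attention_mask should be 1 everywhere (no padding), so the mean is 1
# for both tokenizers.
assert sum(tokens_r["attention_mask"]) / len(tokens_r["attention_mask"]) == 1
assert sum(tokens_p["attention_mask"]) / len(tokens_p["attention_mask"]) == 1
```

The LUKE and mLUKE tests follow the same shape; each check only needs to appear once, which is why the duplicated `token_type_ids` assertion is dropped.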