Unverified commit e83cf581, authored by DomHudson, committed by GitHub
Browse files

Fix sentence fragment within test comments (#31218)

parent 83238eee
@@ -474,8 +474,7 @@ class Data2VecTextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
         self.assertIsNotNone(model)

     def test_create_position_ids_respects_padding_index(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is Data2VecTextForTextEmbeddings.padding_idx + 1
@@ -493,8 +492,7 @@ class Data2VecTextModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
         self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

     def test_create_position_ids_from_inputs_embeds(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is Data2VecTextForTextEmbeddings.padding_idx + 1
...
@@ -246,8 +246,7 @@ class EsmModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.assertIsNotNone(model)

     def test_create_position_ids_respects_padding_index(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is EsmEmbeddings.padding_idx + 1
@@ -271,8 +270,7 @@ class EsmModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

     def test_create_position_ids_from_inputs_embeds(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is EsmEmbeddings.padding_idx + 1
...
@@ -296,8 +296,7 @@ class IBertModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.assertIsNotNone(model)

     def test_create_position_ids_respects_padding_index(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is IBertEmbeddings.padding_idx + 1
@@ -315,9 +314,7 @@ class IBertModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

     def test_create_position_ids_from_inputs_embeds(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is IBertEmbeddings.padding_idx + 1
         """
...
@@ -481,8 +481,7 @@ class RobertaModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMi
         self.assertIsNotNone(model)

     def test_create_position_ids_respects_padding_index(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is RobertaEmbeddings.padding_idx + 1
@@ -500,8 +499,7 @@ class RobertaModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMi
         self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

     def test_create_position_ids_from_inputs_embeds(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is RobertaEmbeddings.padding_idx + 1
...
@@ -488,8 +488,7 @@ class RobertaPreLayerNormModelTest(ModelTesterMixin, GenerationTesterMixin, Pipe
     # Copied from tests.models.roberta.test_modeling_roberta.RobertaModelTest.test_create_position_ids_respects_padding_index with Roberta->RobertaPreLayerNorm
     def test_create_position_ids_respects_padding_index(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is RobertaPreLayerNormEmbeddings.padding_idx + 1
@@ -508,8 +507,7 @@ class RobertaPreLayerNormModelTest(ModelTesterMixin, GenerationTesterMixin, Pipe
     # Copied from tests.models.roberta.test_modeling_roberta.RobertaModelTest.test_create_position_ids_from_inputs_embeds with Roberta->RobertaPreLayerNorm
     def test_create_position_ids_from_inputs_embeds(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is RobertaPreLayerNormEmbeddings.padding_idx + 1
...
@@ -475,8 +475,7 @@ class XLMRobertaXLModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
         self.model_tester.create_and_check_for_question_answering(*config_and_inputs)

     def test_create_position_ids_respects_padding_index(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is XLMRobertaXLEmbeddings.padding_idx + 1
@@ -494,8 +493,7 @@ class XLMRobertaXLModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
         self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

     def test_create_position_ids_from_inputs_embeds(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is XLMRobertaXLEmbeddings.padding_idx + 1
...
@@ -472,8 +472,7 @@ class XmodModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
         self.model_tester.create_and_check_for_question_answering(*config_and_inputs)

     def test_create_position_ids_respects_padding_index(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is XmodEmbeddings.padding_idx + 1
@@ -491,8 +490,7 @@ class XmodModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
         self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))

     def test_create_position_ids_from_inputs_embeds(self):
-        """Ensure that the default position ids only assign a sequential . This is a regression
-        test for https://github.com/huggingface/transformers/issues/1761
+        """This is a regression test for https://github.com/huggingface/transformers/issues/1761

         The position ids should be masked with the embedding object's padding index. Therefore, the
         first available non-padding position index is XmodEmbeddings.padding_idx + 1
...
Markdown is supported
Attach a file by drag &amp; drop or click to upload.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment