Unverified Commit 4c3ae89a authored by Daniel Stancl, committed by GitHub

Remove redundant `test_head_masking = True` flags in test files (#9858)

* Remove redundant test_head_masking = True flags

* Remove all redundant test_head_masking flags in PyTorch test_modeling_* files

* Make test_head_masking = True the default in test_modeling_tf_common.py

* Remove all redundant test_head_masking flags in TensorFlow test_modeling_tf_* files

* Put back test_head_masking=False for TFT5 models
parent caddf912
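
For context, the change boils down to relying on the shared tester mixins for the head-masking flag instead of repeating it in every model test file. A minimal sketch of the pattern (simplified class layout, not the actual Transformers test code):

```python
# Illustrative sketch only: the shared mixin now carries the head-masking
# default, so individual test files only set the flag when they deviate from it.
import unittest


class TFModelTesterMixin:
    test_resize_embeddings = True
    test_head_masking = True  # default added by this commit
    is_encoder_decoder = False


class TFBartModelTest(TFModelTesterMixin, unittest.TestCase):
    # No per-file `test_head_masking = True` needed any more -- it is inherited.
    is_encoder_decoder = True
    test_pruning = False

    def test_flag_is_inherited(self):
        self.assertTrue(self.test_head_masking)
```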
@@ -402,7 +402,6 @@ class BartModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
    all_generative_model_classes = (BartForConditionalGeneration,) if is_torch_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True
    test_missing_keys = False

    def setUp(self):
@@ -206,7 +206,6 @@ class BlenderbotModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
    all_generative_model_classes = (BlenderbotForConditionalGeneration,) if is_torch_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True
    test_missing_keys = False

    def setUp(self):
@@ -214,7 +214,6 @@ class BlenderbotSmallModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
    all_generative_model_classes = (BlenderbotSmallForConditionalGeneration,) if is_torch_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True
    test_missing_keys = False

    def setUp(self):
@@ -209,7 +209,6 @@ class DistilBertModelTest(ModelTesterMixin, unittest.TestCase):
    test_pruning = True
    test_torchscript = True
    test_resize_embeddings = True
-   test_head_masking = True

    def setUp(self):
        self.model_tester = DistilBertModelTester(self)
@@ -527,10 +527,6 @@ class LxmertModelTest(ModelTesterMixin, unittest.TestCase):
    test_pruning = False
    test_torchscript = False
-   test_head_masking = False
-   test_pruning = False
-   test_torchscript = False

    # overwrite function because qa models takes different input label shape
    def _prepare_for_class(self, inputs_dict, model_class, return_labels=False):
        inputs_dict = copy.deepcopy(inputs_dict)
@@ -223,7 +223,6 @@ class MarianModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
    all_generative_model_classes = (MarianMTModel,) if is_torch_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True
    test_missing_keys = False

    def setUp(self):
@@ -219,7 +219,6 @@ class MBartModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
    all_generative_model_classes = (MBartForConditionalGeneration,) if is_torch_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True
    test_missing_keys = False

    def setUp(self):
@@ -207,7 +207,6 @@ class PegasusModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
    all_generative_model_classes = (PegasusForConditionalGeneration,) if is_torch_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True
    test_missing_keys = False

    def setUp(self):
@@ -178,7 +178,6 @@ class TFBartModelTest(TFModelTesterMixin, unittest.TestCase):
    all_generative_model_classes = (TFBartForConditionalGeneration,) if is_tf_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True

    def setUp(self):
        self.model_tester = TFBartModelTester(self)
@@ -177,7 +177,6 @@ class TFBlenderbotModelTest(TFModelTesterMixin, unittest.TestCase):
    all_generative_model_classes = (TFBlenderbotForConditionalGeneration,) if is_tf_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True

    def setUp(self):
        self.model_tester = TFBlenderbotModelTester(self)
@@ -179,7 +179,6 @@ class TFBlenderbotSmallModelTest(TFModelTesterMixin, unittest.TestCase):
    all_generative_model_classes = (TFBlenderbotSmallForConditionalGeneration,) if is_tf_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True

    def setUp(self):
        self.model_tester = TFBlenderbotSmallModelTester(self)
@@ -75,6 +75,7 @@ class TFModelTesterMixin:
    all_model_classes = ()
    all_generative_model_classes = ()
    test_resize_embeddings = True
+   test_head_masking = True
    is_encoder_decoder = False

    def _prepare_for_class(self, inputs_dict, model_class, return_labels=False) -> dict:
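
With the flag now defaulting to True in TFModelTesterMixin, test files that need to opt out (per the commit message, the TFT5 tests) simply override it. A hedged sketch of that opt-out pattern, with a hypothetical early-return guard standing in for the real head-masking test body:

```python
import unittest


class TFModelTesterMixin:
    test_head_masking = True  # default added in test_modeling_tf_common.py

    def test_headmasking(self):
        # Hypothetical guard: skip the check when a subclass opts out.
        if not self.test_head_masking:
            self.skipTest("head masking disabled for this model")
        # ... the actual head-masking assertions would run here ...
        self.assertTrue(self.test_head_masking)


class TFT5ModelTest(TFModelTesterMixin, unittest.TestCase):
    # Per the commit message, TFT5 keeps test_head_masking = False.
    test_head_masking = False
```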
@@ -179,7 +179,6 @@ class TFMarianModelTest(TFModelTesterMixin, unittest.TestCase):
    all_generative_model_classes = (TFMarianMTModel,) if is_tf_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True

    def setUp(self):
        self.model_tester = TFMarianModelTester(self)
@@ -181,7 +181,6 @@ class TFMBartModelTest(TFModelTesterMixin, unittest.TestCase):
    all_generative_model_classes = (TFMBartForConditionalGeneration,) if is_tf_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True

    def setUp(self):
        self.model_tester = TFMBartModelTester(self)
@@ -177,7 +177,6 @@ class TFPegasusModelTest(TFModelTesterMixin, unittest.TestCase):
    all_generative_model_classes = (TFPegasusForConditionalGeneration,) if is_tf_available() else ()
    is_encoder_decoder = True
    test_pruning = False
-   test_head_masking = True

    def setUp(self):
        self.model_tester = TFPegasusModelTester(self)