"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "3254080d450ab0b848ac29bc8a388c836eff5efa"
Unverified commit 11745b4e, authored by NielsRogge and committed by GitHub

[Tests] Improve test_attention_outputs (#20701)



* Improve tests

* Improve TF tests

* Apply suggestion

* Fix test
Co-authored-by: Niels Rogge <nielsrogge@Nielss-MacBook-Pro.local>
parent 722bf7ef
@@ -159,10 +159,6 @@ class ConvNextModelTest(ModelTesterMixin, unittest.TestCase):
     def create_and_test_config_common_properties(self):
         return
 
-    @unittest.skip(reason="ConvNext does not output attentions")
-    def test_attention_outputs(self):
-        pass
-
     @unittest.skip(reason="ConvNext does not use inputs_embeds")
     def test_inputs_embeds(self):
         pass

@@ -129,6 +129,7 @@ class TFConvNextModelTest(TFModelTesterMixin, unittest.TestCase):
     test_onnx = False
     test_resize_embeddings = False
     test_head_masking = False
+    has_attentions = False
 
     def setUp(self):
         self.model_tester = TFConvNextModelTester(self)

@@ -170,10 +171,6 @@ class TFConvNextModelTest(TFModelTesterMixin, unittest.TestCase):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_model(*config_and_inputs)
 
-    @unittest.skip(reason="Model doesn't have attention layers")
-    def test_attention_outputs(self):
-        pass
-
     @unittest.skipIf(
         not is_tf_available() or len(tf.config.list_physical_devices("GPU")) == 0,
         reason="TF does not support backprop for grouped convolutions on CPU.",

@@ -142,10 +142,6 @@ class PoolFormerModelTest(ModelTesterMixin, unittest.TestCase):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_model(*config_and_inputs)
 
-    @unittest.skip(reason="PoolFormer does not output attentions")
-    def test_attention_outputs(self):
-        pass
-
     @unittest.skip("PoolFormer does not use inputs_embeds")
     def test_inputs_embeds(self):
         pass

@@ -147,10 +147,6 @@ class RegNetModelTest(ModelTesterMixin, unittest.TestCase):
     def create_and_test_config_common_properties(self):
         return
 
-    @unittest.skip(reason="RegNet does not output attentions")
-    def test_attention_outputs(self):
-        pass
-
     @unittest.skip(reason="RegNet does not use inputs_embeds")
     def test_inputs_embeds(self):
         pass

@@ -147,10 +147,6 @@ class TFRegNetModelTest(TFModelTesterMixin, unittest.TestCase):
     def test_model_common_attributes(self):
         pass
 
-    @unittest.skip(reason="Model doesn't have attention layers")
-    def test_attention_outputs(self):
-        pass
-
     def test_forward_signature(self):
         config, _ = self.model_tester.prepare_config_and_inputs_for_common()
 

@@ -188,10 +188,6 @@ class ResNetModelTest(ModelTesterMixin, unittest.TestCase):
     def create_and_test_config_common_properties(self):
         return
 
-    @unittest.skip(reason="ResNet does not output attentions")
-    def test_attention_outputs(self):
-        pass
-
     @unittest.skip(reason="ResNet does not use inputs_embeds")
     def test_inputs_embeds(self):
         pass

@@ -150,10 +150,6 @@ class TFResNetModelTest(TFModelTesterMixin, unittest.TestCase):
     def test_inputs_embeds(self):
         pass
 
-    @unittest.skip(reason="ResNet does not output attentions")
-    def test_attention_outputs(self):
-        pass
-
     @unittest.skip(reason="ResNet does not support input and output embeddings")
     def test_model_common_attributes(self):
         pass

@@ -144,10 +144,6 @@ class VanModelTest(ModelTesterMixin, unittest.TestCase):
     def create_and_test_config_common_properties(self):
         return
 
-    @unittest.skip(reason="Van does not output attentions")
-    def test_attention_outputs(self):
-        pass
-
     @unittest.skip(reason="Van does not use inputs_embeds")
     def test_inputs_embeds(self):
         pass

@@ -554,6 +554,9 @@ class ModelTesterMixin:
             loss.backward()
 
     def test_attention_outputs(self):
+        if not self.has_attentions:
+            self.skipTest(reason="Model does not output attentions")
+
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
         config.return_dict = True
 

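For context, here is a minimal, self-contained sketch of the pattern this hunk introduces (the class names below are hypothetical stand-ins, not the actual transformers classes): the shared mixin checks a has_attentions flag at the top of test_attention_outputs, so attention-free models opt out with a single class attribute instead of each test class overriding the method with @unittest.skip.

import unittest


class ModelTesterMixinSketch:
    # Default: most models have attention layers, so the test runs.
    has_attentions = True

    def test_attention_outputs(self):
        # The guard added by this commit: attention-free models skip here,
        # replacing the per-model @unittest.skip overrides removed above.
        if not self.has_attentions:
            self.skipTest(reason="Model does not output attentions")
        # ... the real test goes on to run the model and inspect attentions ...


class AttentionFreeModelTest(ModelTesterMixinSketch, unittest.TestCase):
    # Opting out is now one class attribute rather than a skipped override.
    has_attentions = False


if __name__ == "__main__":
    unittest.main()
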
@@ -830,6 +830,9 @@ class TFModelTesterMixin:
             self.assertLess(np.sum(np.abs(output_dict - output_keywords)), 1e-6)
 
     def test_attention_outputs(self):
+        if not self.has_attentions:
+            self.skipTest(reason="Model does not output attentions")
+
         config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
         config.return_dict = True
         decoder_seq_length = getattr(self.model_tester, "decoder_seq_length", self.model_tester.seq_length)

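The TF mixin gets the same guard. As a quick usage check (again with hypothetical names, not the real test classes), a class that sets the flag reports the test as skipped rather than passed:

import unittest


class TFMixinSketch:
    has_attentions = True

    def test_attention_outputs(self):
        if not self.has_attentions:
            self.skipTest(reason="Model does not output attentions")


class FlaggedModelTest(TFMixinSketch, unittest.TestCase):
    has_attentions = False  # mirrors the TFConvNextModelTest change above


# Run the flagged class and show that the test is recorded as skipped.
suite = unittest.TestLoader().loadTestsFromTestCase(FlaggedModelTest)
result = unittest.TextTestRunner(verbosity=0).run(suite)
print(result.skipped)  # [(<test>, 'Model does not output attentions')]
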