Unverified Commit 3b720ad9 authored by Yih-Dar, committed by GitHub

mark `test_initialization` as flaky in 2 model tests (#27906)



fix
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 7f07c356
@@ -19,6 +19,7 @@ import unittest
 from transformers import Dinov2Config
 from transformers.testing_utils import (
+    is_flaky,
     require_torch,
     require_vision,
     slow,
@@ -230,6 +231,10 @@ class Dinov2ModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.model_tester = Dinov2ModelTester(self)
         self.config_tester = ConfigTester(self, config_class=Dinov2Config, has_text_modality=False, hidden_size=37)

+    @is_flaky(max_attempts=3, description="`torch.nn.init.trunc_normal_` is flaky.")
+    def test_initialization(self):
+        super().test_initialization()
+
     def test_config(self):
         self.config_tester.run_common_tests()
@@ -18,7 +18,7 @@
 import unittest

 from transformers import VitDetConfig
-from transformers.testing_utils import require_torch, torch_device
+from transformers.testing_utils import is_flaky, require_torch, torch_device
 from transformers.utils import is_torch_available

 from ...test_backbone_common import BackboneTesterMixin
@@ -175,6 +175,10 @@ class VitDetModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.model_tester = VitDetModelTester(self)
         self.config_tester = ConfigTester(self, config_class=VitDetConfig, has_text_modality=False, hidden_size=37)

+    @is_flaky(max_attempts=3, description="`torch.nn.init.trunc_normal_` is flaky.")
+    def test_initialization(self):
+        super().test_initialization()
+
     # TODO: Fix me (once this model gets more usage)
     @unittest.skip("Does not work on the tiny model as we keep hitting edge cases.")
     def test_cpu_offload(self):
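
For context, the `is_flaky` decorator from `transformers.testing_utils` reruns a decorated test several times and only reports a failure if every attempt fails, which is why it is used here as a workaround for the occasional out-of-range sample produced by `torch.nn.init.trunc_normal_`. Below is a minimal sketch of such a retry decorator, assuming a plain `unittest`-style test function; the names `flaky_retry` and `wait_before_retry` are illustrative placeholders, and the actual implementation in `transformers.testing_utils` may differ.

```python
# Minimal sketch of an `is_flaky`-style retry decorator.
# `flaky_retry` and `wait_before_retry` are hypothetical names, not the real transformers API.
import functools
import time


def flaky_retry(max_attempts=5, wait_before_retry=None, description=None):
    """Rerun the decorated test up to `max_attempts` times; fail only if every attempt fails."""

    def decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            last_exception = None
            for _ in range(max_attempts):
                try:
                    return test_func(*args, **kwargs)
                except Exception as err:  # any failure triggers another attempt
                    last_exception = err
                    if wait_before_retry is not None:
                        time.sleep(wait_before_retry)
            # All attempts failed: re-raise the last error so the test is reported as failing.
            raise last_exception

        return wrapper

    return decorator
```

Applied as in the diff above, `@flaky_retry(max_attempts=3, ...)` on `test_initialization` would give the truncated-normal initialization check three chances to pass before the test is marked as failed.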