Unverified Commit f0aeb1be authored by amyeroberts, committed by GitHub

Skip flaky NLLB Moe test for now (#22463)

Skip flaky test for now
parent 154c6bb7
@@ -21,6 +21,7 @@ import unittest
 from transformers import NllbMoeConfig, is_torch_available, set_seed
 from transformers.testing_utils import (
+    is_flaky,
     require_sentencepiece,
     require_tokenizers,
     require_torch,
@@ -281,6 +282,7 @@ class NllbMoeModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMi
         model2, info = model_class.from_pretrained(tmpdirname, output_loading_info=True)
         self.assertEqual(info["missing_keys"], [])
 
+    @is_flaky()
     def test_decoder_model_past_with_large_inputs(self):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_decoder_model_past_large_inputs(*config_and_inputs)
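For context on the change above: rather than skipping the test outright, the `@is_flaky()` decorator from `transformers.testing_utils` marks `test_decoder_model_past_with_large_inputs` so the test is retried when it fails non-deterministically. Below is a minimal sketch of how such a retry decorator can be written; it is an illustration only, not the actual `is_flaky` implementation, and the name `retry_flaky` and the `max_attempts=5` default are assumptions made here for exposition.

# Hedged sketch of a retry-based "flaky test" decorator.
# Not the transformers.testing_utils.is_flaky source; names and defaults
# (retry_flaky, max_attempts=5) are illustrative assumptions.
import functools


def retry_flaky(max_attempts=5):
    """Re-run a test up to max_attempts times; fail only if every attempt fails."""

    def decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            last_error = None
            for _ in range(max_attempts):
                try:
                    # Return on the first successful attempt.
                    return test_func(*args, **kwargs)
                except AssertionError as err:
                    # Remember the failure and try again.
                    last_error = err
            # All attempts failed: surface the last assertion error.
            raise last_error

        return wrapper

    return decorator

Applied as in the diff, the decorator wraps the test method so a single non-deterministic failure does not fail the suite, while a consistently failing test still reports its assertion error.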