Unverified Commit d9daeff2 authored by Yoach Lacombe, committed by GitHub

Set seed for M4T retain grad test (#31419)

parent 43ee5858
@@ -612,11 +612,11 @@ class SeamlessM4TModelWithSpeechInputTest(ModelTesterMixin, unittest.TestCase):
             [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length],
         )
 
-    @unittest.skip(
-        reason="In training model, the first speech encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
-    )
     def test_retain_grad_hidden_states_attentions(self):
-        pass
+        # When training the model, the first speech encoder layer is sometimes skipped.
+        # Setting the seed to always have the first layer.
+        set_seed(0)
+        super().test_retain_grad_hidden_states_attentions()
 
 
 @require_torch
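For context, here is a minimal, self-contained sketch of why fixing the RNG seed makes this kind of test deterministic. The `TinyEncoder` class, its sizes, and the `layerdrop` value are illustrative assumptions, not the actual SeamlessM4T speech encoder; only `set_seed` is the real `transformers` helper used in the diff above.

```python
# Illustrative sketch only: a tiny encoder with LayerDrop-style skipping,
# showing why a fixed seed keeps the set of executed layers stable across runs.
# `TinyEncoder` is hypothetical; it is not the SeamlessM4T speech encoder.
import torch
from torch import nn

from transformers import set_seed


class TinyEncoder(nn.Module):
    def __init__(self, num_layers=4, hidden_size=8, layerdrop=0.5):
        super().__init__()
        self.layers = nn.ModuleList([nn.Linear(hidden_size, hidden_size) for _ in range(num_layers)])
        self.layerdrop = layerdrop

    def forward(self, hidden_states):
        kept_layers = []
        for idx, layer in enumerate(self.layers):
            # LayerDrop: in training mode, each layer is skipped with probability
            # `layerdrop`, so its hidden states (and the gradients a test would
            # inspect on them) may simply not exist for a given forward pass.
            if self.training and torch.rand(1).item() < self.layerdrop:
                continue
            hidden_states = layer(hidden_states)
            kept_layers.append(idx)
        return hidden_states, kept_layers


set_seed(0)  # seeds Python's, NumPy's and torch's RNGs, so the draws below are reproducible
model = TinyEncoder().train()
_, kept = model(torch.randn(2, 8))
print(kept)  # same list on every run for a given seed; unseeded, layer 0 can disappear
```

With the seed fixed, the stochastic skipping resolves the same way on every run; per the comments in the diff, seed 0 is one for which the first speech encoder layer is always kept, so `test_retain_grad_hidden_states_attentions` always finds the hidden states and attentions it needs to check.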