"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "52a46dc57bb653aa9dab440e4bb70988b15cdc7e"
Unverified Commit e268d7e5 authored by Dean Wyatte's avatar Dean Wyatte Committed by GitHub
Browse files

disable test_retain_grad_hidden_states_attentions on SeamlessM4TModelWithTextInputTest (#28169)

disable retain_grad_hidden_states_attentions on SeamlessM4TModelWithTextInputTest
parent 1d777359
@@ -751,6 +751,12 @@ class SeamlessM4TModelWithTextInputTest(
    def test_training_gradient_checkpointing_use_reentrant_false(self):
        pass
@unittest.skip(
reason="In training model, the first encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
)
def test_retain_grad_hidden_states_attentions(self):
pass
@require_torch
class SeamlessM4TGenerationTest(unittest.TestCase):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment