From 3b4f4779587b605e0a1bb81045a146c4125dd02f Mon Sep 17 00:00:00 2001
From: Dean Wyatte
Date: Wed, 20 Dec 2023 19:19:21 +0000
Subject: [PATCH] disable retain_grad_hidden_states_attentions on SeamlessM4TModelWithTextInputTest

---
 tests/models/seamless_m4t/test_modeling_seamless_m4t.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/tests/models/seamless_m4t/test_modeling_seamless_m4t.py b/tests/models/seamless_m4t/test_modeling_seamless_m4t.py
index 68979202d46e..365775171e8c 100644
--- a/tests/models/seamless_m4t/test_modeling_seamless_m4t.py
+++ b/tests/models/seamless_m4t/test_modeling_seamless_m4t.py
@@ -751,6 +751,12 @@ def test_training_gradient_checkpointing_use_reentrant(self):
     def test_training_gradient_checkpointing_use_reentrant_false(self):
         pass
 
+    @unittest.skip(
+        reason="In training mode, the first encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
+    )
+    def test_retain_grad_hidden_states_attentions(self):
+        pass
+
 
 @require_torch
 class SeamlessM4TGenerationTest(unittest.TestCase):