From 4c65e293a27fe6812a657d186b7d866b0ba33191 Mon Sep 17 00:00:00 2001
From: Yoach Lacombe
Date: Fri, 14 Jun 2024 14:08:38 +0200
Subject: [PATCH] Set seed for M4T retain grad test

---
 tests/models/seamless_m4t/test_modeling_seamless_m4t.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/models/seamless_m4t/test_modeling_seamless_m4t.py b/tests/models/seamless_m4t/test_modeling_seamless_m4t.py
index f083cba7db82..2647c2eac764 100644
--- a/tests/models/seamless_m4t/test_modeling_seamless_m4t.py
+++ b/tests/models/seamless_m4t/test_modeling_seamless_m4t.py
@@ -612,11 +612,11 @@ def test_attention_outputs(self):
                 [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length],
             )
 
-    @unittest.skip(
-        reason="In training model, the first speech encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
-    )
     def test_retain_grad_hidden_states_attentions(self):
-        pass
+        # When training the model, the first speech encoder layer is sometimes skipped.
+        # Setting the seed ensures the first layer is always used.
+        set_seed(0)
+        super().test_retain_grad_hidden_states_attentions()
 
 
 @require_torch
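
Note: as a minimal sketch of why the seed matters (not the actual transformers implementation; the probability value and helper name below are illustrative assumptions), the speech encoder applies LayerDrop-style stochastic layer skipping during training, so whether the first layer runs, and therefore whether its hidden states can retain gradients, depends on the RNG state. Fixing the seed makes that draw reproducible:

    import torch
    from transformers import set_seed

    layerdrop_prob = 0.1  # assumed skip probability, for illustration only

    def first_layer_is_skipped() -> bool:
        # In training mode, a layer is dropped when a uniform draw
        # falls below the layer-drop probability.
        return torch.rand(1).item() < layerdrop_prob

    set_seed(0)
    print(first_layer_is_skipped())  # same outcome on every run for seed 0,
                                     # so the test always exercises layer 0

With an unseeded RNG the skip decision varies across runs, which is why the test was previously marked with unittest.skip; seeding turns a flaky assertion into a deterministic one.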