From 0b2cdc82162193178d660a0e7515314c03ff3312 Mon Sep 17 00:00:00 2001 From: Niels Date: Thu, 31 Jul 2025 15:30:29 +0200 Subject: [PATCH] Support MetaCLIP 2 --- src/transformers/models/clip/modeling_clip.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/models/clip/modeling_clip.py b/src/transformers/models/clip/modeling_clip.py index a187bdaa635e..ea842d2633e7 100644 --- a/src/transformers/models/clip/modeling_clip.py +++ b/src/transformers/models/clip/modeling_clip.py @@ -634,7 +634,7 @@ def forward( last_hidden_state = encoder_outputs.last_hidden_state last_hidden_state = self.final_layer_norm(last_hidden_state) - if self.eos_token_id == 2: + if torch.all(input_ids.max(dim=-1).values == 49407).item(): # The `eos_token_id` was incorrect before PR #24773: Let's keep what have been done here. # A CLIP model with such `eos_token_id` in the config can't work correctly with extra new tokens added # ------------------------------------------------------------