From 71b75902c66a108d4cd9881d64e5b63a8b617ea8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Quentin=20Gallou=C3=A9dec?=
Date: Wed, 3 Sep 2025 16:48:36 +0000
Subject: [PATCH] Fix backward compatibility with accelerate in Trainer

---
 src/transformers/trainer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index e1948205264a..f808c67ac308 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -5626,7 +5626,7 @@ def get_batch_samples(
                 # In the DataParallel case, convert the scalar tensor into a 1-dim tensor
                 num_items_in_batch = num_items_in_batch.unsqueeze(0)
             # Divide by number of devices with the same batch
-            if pc := self.accelerator.parallelism_config:
+            if pc := getattr(self.accelerator, "parallelism_config", None):
                 num_items_in_batch = num_items_in_batch // pc.non_data_parallel_size
 
         return batch_samples, num_items_in_batch
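
Why the change matters: `Accelerator.parallelism_config` only exists in recent accelerate
releases, so plain attribute access raises AttributeError on older versions and breaks the
Trainer. Below is a minimal, self-contained sketch of the pattern; `LegacyAccelerator` and
`ModernAccelerator` are hypothetical stand-ins for old and new accelerate objects, not real
library classes.

    # Sketch of the compatibility pattern applied in the patch.
    class LegacyAccelerator:
        pass  # predates `parallelism_config`; attribute access would raise AttributeError

    class _ParallelismConfig:
        non_data_parallel_size = 2  # illustrative value, e.g. a tensor-parallel degree

    class ModernAccelerator:
        parallelism_config = _ParallelismConfig()

    def scale_num_items(accelerator, num_items_in_batch):
        # getattr with a None default turns the division into a no-op on old
        # accelerate instead of crashing; on new accelerate behavior is unchanged.
        if pc := getattr(accelerator, "parallelism_config", None):
            num_items_in_batch = num_items_in_batch // pc.non_data_parallel_size
        return num_items_in_batch

    assert scale_num_items(LegacyAccelerator(), 8) == 8   # old accelerate: left as-is
    assert scale_num_items(ModernAccelerator(), 8) == 4   # new accelerate: divided

Note that when `parallelism_config` exists but is None (no parallelism configured), both the
old and new code skip the division, so the guard only changes behavior when the attribute is
missing entirely.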