From cecfd76d1214d8f95a31ff554bfdb78f6c383ae4 Mon Sep 17 00:00:00 2001
From: Tong Li
Date: Wed, 14 Aug 2024 02:25:32 +0000
Subject: [PATCH] fix sync condition

---
 colossalai/booster/plugin/hybrid_parallel_plugin.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/colossalai/booster/plugin/hybrid_parallel_plugin.py b/colossalai/booster/plugin/hybrid_parallel_plugin.py
index d2933a4afe7f..e5acdb05172a 100644
--- a/colossalai/booster/plugin/hybrid_parallel_plugin.py
+++ b/colossalai/booster/plugin/hybrid_parallel_plugin.py
@@ -1326,8 +1326,10 @@ def execute_pipeline(
         )
 
         # run with gradients accumulation
-        if model.require_grad_sync == False or (
-            isinstance(optimizer, HybridParallelZeroOptimizer) and optimizer.require_grad_sync == False
+        if (
+            model.require_grad_sync == False
+            or (isinstance(optimizer, HybridParallelZeroOptimizer) and optimizer.require_grad_sync == False)
+            or not torch.is_grad_enabled()
         ):
             return outputs
 