From a9879b89614b44f45aeabc1711ddb5fb49863013 Mon Sep 17 00:00:00 2001
From: Olatunji Ruwase
Date: Thu, 29 Apr 2021 13:43:21 +0000
Subject: [PATCH] Handle Norm allreduce when no mp

---
 deepspeed/runtime/zero/stage3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/deepspeed/runtime/zero/stage3.py b/deepspeed/runtime/zero/stage3.py
index 2b16887ff60d..dafd2566dde6 100755
--- a/deepspeed/runtime/zero/stage3.py
+++ b/deepspeed/runtime/zero/stage3.py
@@ -2403,7 +2403,7 @@ def _model_parallel_all_reduce(self, tensor, op):
         """ Perform all reduce within model parallel group, if any.
         """
         if self.model_parallel_group is None:
-            torch.distributed.all_reduce(tensor=tensor, op=op)
+            pass
         else:
             torch.distributed.all_reduce(tensor=tensor, op=op,