From 920fe31526302bf82b7906777c6a6927d793114b Mon Sep 17 00:00:00 2001
From: Frank Lee
Date: Thu, 14 Apr 2022 17:20:35 +0800
Subject: [PATCH] [compatibility] used backward-compatible API for global
 process group (#758)

---
 colossalai/context/parallel_context.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/colossalai/context/parallel_context.py b/colossalai/context/parallel_context.py
index ceb2064a6..6102e701a 100644
--- a/colossalai/context/parallel_context.py
+++ b/colossalai/context/parallel_context.py
@@ -374,7 +374,7 @@ class ParallelContext(metaclass=SingletonMeta):
         # None will give the default global process group for pytorch dist operations
         ranks = list(range(world_size))
         cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() != 'gloo' else None
-        self._register_dist(rank, world_size, None, cpu_group, ranks, ParallelMode.GLOBAL)
+        self._register_dist(rank, world_size, dist.GroupMember.WORLD, cpu_group, ranks, ParallelMode.GLOBAL)
         self.add_global_rank(ParallelMode.GLOBAL, rank)
 
     def _register_dist(self, local_rank, world_size, process_group, cpu_group, ranks_in_group, mode):
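
Note (not part of the patch): the change above stores dist.GroupMember.WORLD, the
concrete default process group created by init_process_group, instead of None.
Passing None relies on every downstream torch.distributed call interpreting None
as "the default group", which is not guaranteed on older PyTorch releases; an
explicit ProcessGroup object works across versions. The sketch below is a
minimal, hypothetical illustration of the same pattern -- init_global_group is
an invented helper, not ColossalAI API; it assumes the usual launcher
environment variables (RANK, WORLD_SIZE, MASTER_ADDR, MASTER_PORT) are set.

import torch.distributed as dist

def init_global_group():
    # Initialize the default (world) process group; assumes launcher env vars.
    dist.init_process_group(backend='nccl')
    rank = dist.get_rank()
    world_size = dist.get_world_size()
    ranks = list(range(world_size))

    # Backward-compatible: a concrete ProcessGroup object rather than None,
    # so code paths that require an actual group work on older PyTorch too.
    group = dist.GroupMember.WORLD

    # A separate gloo group for CPU tensor collectives, mirroring the patched
    # code; skipped when the default backend is already gloo.
    cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() != 'gloo' else None
    return rank, world_size, group, cpu_group, ranks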