diff --git a/colossalai/context/parallel_context.py b/colossalai/context/parallel_context.py
index ceb2064a6..6102e701a 100644
--- a/colossalai/context/parallel_context.py
+++ b/colossalai/context/parallel_context.py
@@ -374,7 +374,7 @@ class ParallelContext(metaclass=SingletonMeta):
         # None will give the default global process group for pytorch dist operations
         ranks = list(range(world_size))
         cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() != 'gloo' else None
-        self._register_dist(rank, world_size, None, cpu_group, ranks, ParallelMode.GLOBAL)
+        self._register_dist(rank, world_size, dist.GroupMember.WORLD, cpu_group, ranks, ParallelMode.GLOBAL)
         self.add_global_rank(ParallelMode.GLOBAL, rank)
 
     def _register_dist(self, local_rank, world_size, process_group, cpu_group, ranks_in_group, mode):
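
The change registers `dist.GroupMember.WORLD` (PyTorch's explicit handle to the default process group) for `ParallelMode.GLOBAL` instead of `None`. A minimal sketch of the semantics this relies on, independent of ColossalAI (the `demo` function and tensor values are illustrative assumptions, not part of the patch): passing the world group object to a collective behaves the same as passing `group=None`, but downstream code now receives a real `ProcessGroup` object rather than `None`.

```python
# Sketch only: shows that dist.GroupMember.WORLD is equivalent to the implicit
# default group for collectives, assuming torch.distributed is already initialized
# (e.g. via dist.init_process_group(backend='nccl', ...)).
import torch
import torch.distributed as dist


def demo():
    world_group = dist.GroupMember.WORLD   # explicit handle to the default process group
    t = torch.ones(1)
    dist.all_reduce(t, group=world_group)  # same result as the implicit default group
    dist.all_reduce(t)                     # group=None -> default group
```

Holding an actual `ProcessGroup` for `ParallelMode.GLOBAL` avoids special-casing `None` wherever the registered group is later queried or passed to communication utilities.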