[MOE] polish moe_env (#467)

HELSON
2022-03-19 15:36:25 +08:00
committed by GitHub
parent bccbc15861
commit aff9d354f7
8 changed files with 96 additions and 90 deletions


@@ -1,6 +1,6 @@
 import torch.nn as nn
 import torch.distributed as dist
-from colossalai.core import global_context as gpc, moe_context as moe_env
+from colossalai.core import global_context as gpc, MOE_CONTEXT
 from colossalai.context import ParallelMode
 from .common import is_using_ddp
 from typing import Dict, List
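
For readers tracking the rename: the commit drops the lowercase moe_context alias and imports the singleton under its canonical uppercase name, MOE_CONTEXT. An illustrative before/after of a call site (two snippets, since the two imports belong to the pre- and post-commit trees respectively; the world_size attribute is taken from the second hunk below):

    # Before this commit: the MoE state was imported under a lowercase alias.
    from colossalai.core import moe_context as moe_env
    world_size = moe_env.world_size

    # After this commit: the same singleton, under its canonical name.
    from colossalai.core import MOE_CONTEXT
    world_size = MOE_CONTEXT.world_size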
@@ -45,7 +45,7 @@ def sync_moe_model_param(model: nn.Module):
     for ep_size in param_dict:
         # When ep_size = world_size, communication is not needed
-        if ep_size != 1 and ep_size != moe_env.world_size:
-            src_rank = dist.get_rank(moe_env.information[ep_size].ep_group)
+        if ep_size != 1 and ep_size != MOE_CONTEXT.world_size:
+            src_rank = dist.get_rank(MOE_CONTEXT.information[ep_size].ep_group)
         for param in param_dict[ep_size]:
             dist.broadcast(param, src=src_rank, group=param.moe_info.dp_group)
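
The second hunk is the substance of sync_moe_model_param: parameters whose expert-parallel size is neither 1 nor the full world size are broadcast from a single source rank across their data-parallel group, so all replicas start from identical weights. Below is a minimal sketch of that pattern in plain torch.distributed; the function name broadcast_moe_params and the flat ep_group/dp_group arguments are illustrative stand-ins for the MOE_CONTEXT.information[ep_size].ep_group and param.moe_info.dp_group lookups in the diff:

    import torch.distributed as dist

    def broadcast_moe_params(param_dict, ep_group, dp_group, world_size):
        # Sketch only: mirrors the control flow of the diff above.
        for ep_size, params in param_dict.items():
            # Per the diff's comment, no communication is needed when
            # ep_size equals world_size; ep_size == 1 is likewise skipped.
            if ep_size != 1 and ep_size != world_size:
                # dist.get_rank(group) returns this process's rank within
                # `group`; the diff derives the broadcast source from the
                # expert-parallel group in exactly this way.
                src_rank = dist.get_rank(ep_group)
                for param in params:
                    # Send param from src_rank to every rank in the
                    # data-parallel group.
                    dist.broadcast(param, src=src_rank, group=dp_group)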