Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-21 01:24:04 +00:00)
[MOE] polish moe_env (#467)
@@ -1,6 +1,6 @@
 import torch.nn as nn
 import torch.distributed as dist
-from colossalai.core import global_context as gpc, moe_context as moe_env
+from colossalai.core import global_context as gpc, MOE_CONTEXT
 from colossalai.context import ParallelMode
 from .common import is_using_ddp
 from typing import Dict, List
@@ -45,7 +45,7 @@ def sync_moe_model_param(model: nn.Module):
 
     for ep_size in param_dict:
         # When ep_size = world_size, communication is not needed
-        if ep_size != 1 and ep_size != moe_env.world_size:
-            src_rank = dist.get_rank(moe_env.information[ep_size].ep_group)
+        if ep_size != 1 and ep_size != MOE_CONTEXT.world_size:
+            src_rank = dist.get_rank(MOE_CONTEXT.information[ep_size].ep_group)
             for param in param_dict[ep_size]:
                 dist.broadcast(param, src=src_rank, group=param.moe_info.dp_group)