[moe] full test for deepseek and mixtral (pp + sp to fix)
@@ -9,7 +9,7 @@ import torch.nn.functional as F
 
 from colossalai.legacy.moe.load_balance import LoadBalancer
 from colossalai.legacy.moe.utils import create_ep_hierarchical_group, get_noise_generator
-from colossalai.moe.operators import AllGather, AllToAll, HierarchicalAllToAll, MoeCombine, MoeDispatch, ReduceScatter
+from colossalai.moe._operation import AllGather, AllToAll, HierarchicalAllToAll, MoeCombine, MoeDispatch, ReduceScatter
 from colossalai.shardformer.layer.moe import MLPExperts
 from colossalai.tensor.moe_tensor.api import get_dp_group, get_ep_group, get_ep_group_ranks, get_ep_size
 
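The hunk's only change repoints the import of the MoE communication primitives (AllGather, AllToAll, HierarchicalAllToAll, MoeCombine, MoeDispatch, ReduceScatter) from colossalai.moe.operators to colossalai.moe._operation. For context, the sketch below illustrates the kind of token exchange an expert-parallel AllToAll performs, written against plain torch.distributed rather than ColossalAI's internals; the helper name dispatch_tokens and the uniform-split assumption are illustrative, not part of the library's API.

import torch
import torch.distributed as dist

def dispatch_tokens(tokens: torch.Tensor, ep_group=None) -> torch.Tensor:
    """Exchange routed tokens across an expert-parallel group.

    Assumes an initialized process group and that `tokens` splits evenly
    along dim 0 into one equal chunk per rank: chunk i is sent to rank i,
    and the chunks the other ranks routed to this rank are received in
    their place. Real MoE dispatch uses per-expert split sizes; uniform
    splits keep the sketch minimal.
    """
    output = torch.empty_like(tokens)
    dist.all_to_all_single(output, tokens, group=ep_group)
    return output

The combine step after expert computation is the inverse exchange, so the same call with the received tensor routes each token's output back to its source rank.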