Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-05 02:51:59 +00:00)
[polish] polish singleton and global context (#500)
@@ -8,7 +8,7 @@ from colossalai.context import ParallelMode
 from colossalai.core import global_context as gpc
 from colossalai.utils import free_port, get_current_device
 from colossalai.nn.layer.moe import Top1Router, Top2Router, MoeLayer, Experts
-from colossalai.core import MOE_CONTEXT
+from colossalai.context.moe_context import MOE_CONTEXT

 BATCH_SIZE = 16
 NUM_EXPERTS = 4
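The diff only moves the import: after this commit the MoE context singleton is exposed from colossalai.context.moe_context rather than colossalai.core. The sketch below is a minimal illustration of the singleton-metaclass pattern the commit title ("polish singleton and global context") refers to; the class SingletonMeta, the class MoeContext, and its attributes are assumptions for illustration, not the actual ColossalAI implementation.

# Minimal sketch of a singleton global context (illustrative only; names and
# attributes are assumed, not taken from the ColossalAI source).

class SingletonMeta(type):
    """Metaclass that returns the same instance on every instantiation."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class MoeContext(metaclass=SingletonMeta):
    """Hypothetical global context holding MoE parallel settings."""

    def __init__(self):
        self.num_experts = None  # assumed attribute

    def setup(self, num_experts: int):
        self.num_experts = num_experts


# Module-level singleton, analogous to how the test imports it:
#     from colossalai.context.moe_context import MOE_CONTEXT
MOE_CONTEXT = MoeContext()

Because the metaclass caches the instance, every module that imports MOE_CONTEXT sees the same object, which is why the test only needs its import path updated and no other change.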