Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-05 11:02:05 +00:00)
[legacy] move engine to legacy (#4560)
* [legacy] move engine to legacy
* [example] fix seq parallel example
* [example] fix seq parallel example
* [test] test gemini plugin hang
* [test] test gemini plugin hang
* [test] test gemini plugin hang
* [test] test gemini plugin hang
* [test] test gemini plugin hang
* [example] update seq parallel requirements
@@ -5,7 +5,7 @@ import torch.nn as nn
 import colossalai
 from colossalai.context.moe_context import MOE_CONTEXT
-from colossalai.engine.gradient_handler import MoeGradientHandler
+from colossalai.legacy.engine.gradient_handler import MoeGradientHandler
 from colossalai.nn.layer.moe import Experts, MoeLayer, Top1Router, UniformNoiseGenerator
 from colossalai.testing import assert_equal_in_group, rerun_if_address_is_in_use, spawn
 from colossalai.utils import get_current_device
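For downstream code that must run both before and after this rename, a minimal compatibility sketch (not part of the commit; it assumes only the two import paths shown in the diff above): try the new colossalai.legacy.engine location first and fall back to the old colossalai.engine path on releases that predate #4560.

# Compatibility shim (a sketch, not from the commit): prefer the new
# legacy path, then fall back to the pre-#4560 import location.
try:
    from colossalai.legacy.engine.gradient_handler import MoeGradientHandler
except ImportError:
    from colossalai.engine.gradient_handler import MoeGradientHandler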