Mirror of https://github.com/hpcaitech/ColossalAI.git
[test] reorganize zero/gemini tests (#3445)
@@ -14,7 +14,7 @@ from colossalai.utils import free_port, get_current_device
 from colossalai.zero import ColoInitContext
 from tests.test_moe.test_moe_zero_init import MoeModel
 from tests.test_tensor.common_utils import debug_print
-from tests.test_zero.common import CONFIG
+from tests.test_zero.test_legacy.common import CONFIG


 def exam_moe_checkpoint():
@@ -13,7 +13,7 @@ from colossalai.utils import free_port, get_current_device
 from colossalai.zero import ColoInitContext
 from tests.test_moe.test_moe_zero_init import MoeModel
 from tests.test_tensor.common_utils import debug_print
-from tests.test_zero.common import CONFIG
+from tests.test_zero.test_legacy.common import CONFIG


 @parameterize("init_device_type", ['cpu', 'cuda'])
@@ -14,7 +14,7 @@ from colossalai.testing import parameterize, rerun_if_address_is_in_use
 from colossalai.utils import free_port, get_current_device
 from colossalai.zero.legacy.init_ctx import ZeroInitContext
 from colossalai.zero.legacy.shard_utils import BucketTensorShardStrategy, TensorShardStrategy
-from tests.test_zero.common import CONFIG
+from tests.test_zero.test_legacy.common import CONFIG


 class MoeModel(nn.Module):
@@ -17,7 +17,7 @@ from colossalai.zero.legacy.sharded_model._utils import cast_tensor_to_fp16
 from colossalai.zero.legacy.sharded_model.utils import col_model_deepcopy
 from tests.components_to_test.registry import non_distributed_component_funcs
 from tests.test_moe.test_moe_zero_init import MoeModel
-from tests.test_zero.common import CONFIG, check_grads_padding, run_fwd_bwd
+from tests.test_zero.test_legacy.common import CONFIG, check_grads_padding, run_fwd_bwd


 @parameterize("enable_autocast", [False])
@@ -20,7 +20,7 @@ from colossalai.zero.legacy.sharded_optim import ShardedOptimizerV2
 from colossalai.zero.low_level._utils import has_inf_or_nan
 from tests.components_to_test.registry import non_distributed_component_funcs
 from tests.test_moe.test_moe_zero_init import MoeModel
-from tests.test_zero.common import CONFIG, check_sharded_model_params
+from tests.test_zero.test_legacy.common import CONFIG, check_sharded_model_params


 def _run_step(model, optimizer, data, label, criterion, grad_handler):
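Note: every hunk above makes the same change: the shared ZeRO test helpers are now imported from tests.test_zero.test_legacy.common instead of tests.test_zero.common, which suggests the legacy ZeRO tests and their common utilities were moved into a test_legacy subpackage, presumably to keep them separate from the newer Gemini tests. The sketch below is a minimal illustration of what an affected test looks like after the move; only the updated import path comes from this diff, while the launch/spawn scaffolding follows the usual pattern of ColossalAI tests of this era and the test body and names (run_dist, test_legacy_zero_import) are hypothetical.

from functools import partial

import torch.multiprocessing as mp

import colossalai
from colossalai.testing import rerun_if_address_is_in_use
from colossalai.utils import free_port

# Relocated helper module: this import targeted tests.test_zero.common before this commit.
from tests.test_zero.test_legacy.common import CONFIG


def run_dist(rank, world_size, port):
    # CONFIG carries the ZeRO settings shared by the legacy tests (see common.py).
    colossalai.launch(config=CONFIG, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    # ... build a sharded model and exercise it here (omitted in this sketch) ...


@rerun_if_address_is_in_use()
def test_legacy_zero_import():
    # Hypothetical smoke test: spawn one worker per rank, as the legacy zero tests do.
    world_size = 2
    run_func = partial(run_dist, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)

Apart from the changed import line, the test files themselves are untouched, so the reorganization is a pure relocation of the shared helpers rather than a behavioral change.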