Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-03 18:19:58 +00:00
[refactor] move chunk and chunkmgr to directory gemini (#1182)
@@ -6,7 +6,7 @@ from colossalai.testing import rerun_if_address_is_in_use
 from colossalai.utils.cuda import get_current_device
 from colossalai.utils import free_port
 from colossalai.utils.model.colo_init_context import ColoInitContext
-from colossalai.tensor import ChunkManager
+from colossalai.gemini import ChunkManager
 from functools import partial
 from colossalai.nn.parallel import ColoDDP, ZeroDDP
 from colossalai.gemini.gemini_mgr import GeminiManager

@@ -6,7 +6,7 @@ from colossalai.testing import rerun_if_address_is_in_use
 from colossalai.utils.cuda import get_current_device
 from colossalai.utils import free_port
 from colossalai.utils.model.colo_init_context import ColoInitContext
-from colossalai.tensor import ChunkManager
+from colossalai.gemini import ChunkManager
 from functools import partial
 from tests.components_to_test.registry import non_distributed_component_funcs
 from colossalai.nn.parallel import ZeroDDP, ColoDDP

@@ -5,14 +5,7 @@ import torch.multiprocessing as mp
 from colossalai.testing import rerun_if_address_is_in_use
 from colossalai.utils.cuda import get_current_device
 from colossalai.utils import free_port
 from colossalai.utils.model.colo_init_context import ColoInitContext
-from colossalai.tensor import ChunkManager
 from functools import partial
 from tests.components_to_test.registry import non_distributed_component_funcs
 from colossalai.nn.parallel import ZeroDDP, ColoDDP
-from colossalai.gemini.gemini_mgr import GeminiManager
-from typing import Callable
-from collections import OrderedDict
-from colossalai.nn.parallel.reducer import Reducer
-import torch.distributed as dist
-from torch.distributed.distributed_c10d import _get_default_group

@@ -4,7 +4,7 @@ import pytest
 import torch.multiprocessing as mp
 from typing import List
 from functools import partial
-from colossalai.tensor import ChunkManager
+from colossalai.gemini import ChunkManager
 from colossalai.testing import rerun_if_address_is_in_use, parameterize
 from colossalai.utils import free_port
 from colossalai.core import global_context as gpc

@@ -7,7 +7,7 @@ from colossalai.testing import rerun_if_address_is_in_use
 from colossalai.utils.cuda import get_current_device
 from colossalai.utils import free_port
 from colossalai.utils.model.colo_init_context import ColoInitContext
-from colossalai.tensor import ChunkManager
+from colossalai.gemini import ChunkManager
 from colossalai.core import global_context as gpc
 from functools import partial
 from _utils import tensor_equal, set_seed, tensor_shard_equal

@@ -7,13 +7,12 @@ from colossalai.testing import rerun_if_address_is_in_use
 from colossalai.utils.cuda import get_current_device
 from colossalai.utils import free_port
 from colossalai.utils.model.colo_init_context import ColoInitContext
-from colossalai.tensor import ChunkManager
 from colossalai.core import global_context as gpc
 from functools import partial
 from tests.test_tensor._utils import set_seed
 from tests.components_to_test.registry import non_distributed_component_funcs
 from colossalai.nn.parallel.data_parallel import ZeroDDP
-from colossalai.gemini import GeminiManager
+from colossalai.gemini import ChunkManager, GeminiManager
 from colossalai.testing import parameterize
 from colossalai.nn.optimizer import HybridAdam
 from colossalai.zero import ZeroOptimizer
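
Taken together, the hunks show a single mechanical change: ChunkManager moves from colossalai.tensor to colossalai.gemini, where GeminiManager already lives, so the two can be imported side by side. Below is a minimal sketch of how a test might wire them up after this refactor. Only the import paths are confirmed by the diff; the constructor signatures, parameter names, and values are assumptions based on the API of that era, not part of this commit.

# Hedged sketch of post-#1182 usage; constructor calls are assumptions.
import torch

from colossalai.gemini import ChunkManager, GeminiManager  # new unified path
from colossalai.nn.parallel import ZeroDDP

def wrap_model(model: torch.nn.Module) -> ZeroDDP:
    # Assumed: ChunkManager takes a chunk size (in elements); 32M is illustrative.
    chunk_manager = ChunkManager(chunk_size=32 * 1024 ** 2)
    # Assumed: GeminiManager takes a placement policy ('cpu'/'cuda'/'auto')
    # plus the chunk manager that owns the parameter chunks.
    gemini_manager = GeminiManager('cuda', chunk_manager)
    # Assumed: ZeroDDP wraps the module with the gemini manager.
    return ZeroDDP(model, gemini_manager)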