Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-06 19:40:28 +00:00
[legacy] clean up legacy code (#4743)
* [legacy] remove outdated codes of pipeline (#4692)
* [legacy] remove cli of benchmark and update optim (#4690)
* [legacy] remove cli of benchmark and update optim
* [doc] fix cli doc test
* [legacy] fix engine clip grad norm
* [legacy] remove outdated colo tensor (#4694)
* [legacy] remove outdated colo tensor
* [test] fix test import
* [legacy] move outdated zero to legacy (#4696)
* [legacy] clean up utils (#4700)
* [legacy] clean up utils
* [example] update examples
* [legacy] clean up amp
* [legacy] fix amp module
* [legacy] clean up gpc (#4742)
* [legacy] clean up context
* [legacy] clean core, constants and global vars
* [legacy] refactor initialize
* [example] fix examples ci
* [example] fix examples ci
* [legacy] fix tests
* [example] fix gpt example
* [example] fix examples ci
* [devops] fix ci installation
* [example] fix examples ci
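The hunks below all make the same kind of change: legacy tensor symbols (ProcessGroup, ComputePattern, ComputeSpec, ShardSpec, ColoTensorSpec, distspec, _DistSpec) move from colossalai.tensor to colossalai.legacy.tensor, while ColoParameter and ColoTensor keep their colossalai.tensor path. A minimal compatibility sketch for downstream code that has to import these names on both pre- and post-#4743 checkouts might look like this (hypothetical shim, not part of the commit):

# Hypothetical compatibility shim, not part of commit #4743.
# Prefer the colossalai.legacy.tensor location introduced by this clean-up,
# and fall back to the old colossalai.tensor location on older checkouts.
try:
    from colossalai.legacy.tensor import ComputePattern, ProcessGroup, ShardSpec, distspec
except ImportError:  # older ColossalAI: symbols not yet moved under colossalai.legacy
    from colossalai.tensor import ComputePattern, ProcessGroup, ShardSpec, distspec

# ColoParameter / ColoTensor are unaffected by the move and keep their path.
from colossalai.tensor import ColoParameter, ColoTensor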
@@ -4,7 +4,8 @@ import torch
 import torch.nn.functional as F

 from colossalai.legacy.nn._ops._utils import dual_all_to_all
-from colossalai.tensor import ColoParameter, ColoTensor, ColoTensorSpec, ComputePattern, ProcessGroup, ShardSpec
+from colossalai.legacy.tensor import ColoTensorSpec, ComputePattern, ProcessGroup, ShardSpec
+from colossalai.tensor import ColoParameter, ColoTensor

 from .cache_mgr import CachedParamMgr, EvictionStrategy
 from .cached_embedding import CachedEmbeddingBag
@@ -6,7 +6,7 @@ import torch.distributed as dist
 import torch.nn.functional as F

 from colossalai.legacy.nn._ops._utils import dual_all_to_all_tablewise
-from colossalai.tensor import ProcessGroup
+from colossalai.legacy.tensor import ProcessGroup

 from .cache_mgr import EvictionStrategy
 from .cached_embedding import CachedEmbeddingBag
@@ -7,7 +7,7 @@ import torch.nn as nn
 from torch.profiler import record_function

 from colossalai.legacy.nn._ops._utils import dual_all_to_all_tablewise
-from colossalai.tensor import ProcessGroup
+from colossalai.legacy.tensor import ProcessGroup

 from .cache_mgr import EvictionStrategy
 from .cached_embedding import CachedEmbeddingBag
@@ -1,7 +1,7 @@
 from typing import Dict, List

-from colossalai.tensor import ComputePattern
-from colossalai.tensor.distspec import _DistSpec
+from colossalai.legacy.tensor import ComputePattern
+from colossalai.legacy.tensor.distspec import _DistSpec


 class ColoModule(object):
@@ -1,4 +1,4 @@
-from colossalai.tensor import ComputePattern, ProcessGroup, ShardSpec, distspec
+from colossalai.legacy.tensor import ComputePattern, ProcessGroup, ShardSpec, distspec

 from .colo_module import ColoModule

@@ -1,4 +1,4 @@
-from colossalai.tensor import ComputePattern, ProcessGroup, ShardSpec, distspec
+from colossalai.legacy.tensor import ComputePattern, ProcessGroup, ShardSpec, distspec

 from .colo_module import ColoModule

@@ -2,7 +2,8 @@ from typing import Dict

 import torch

-from colossalai.tensor import ColoParameter, ComputeSpec, ProcessGroup, distspec
+from colossalai.legacy.tensor import ComputeSpec, ProcessGroup, distspec
+from colossalai.tensor import ColoParameter

 from . import ColoModule
