Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-02 01:28:31 +00:00
Merge branch 'main' into feature/shardformer
@@ -6,7 +6,7 @@ import torch
 from torch.utils.data import Dataset
 from transformers import GPT2Tokenizer
 
-from colossalai.registry import DATASETS
+from colossalai.legacy.registry import DATASETS
 
 
 @DATASETS.register_module
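For context, the relocated registry keeps the same decorator interface as far as this diff shows. A minimal sketch of registering a dataset under the new path, assuming register_module is unchanged by the move to colossalai.legacy.registry; ToyTextDataset is a hypothetical stand-in, not code from this commit:

from torch.utils.data import Dataset

from colossalai.legacy.registry import DATASETS


@DATASETS.register_module
class ToyTextDataset(Dataset):
    # Hypothetical example class: illustrates the decorator usage only.
    def __init__(self, num_samples: int = 8):
        self.num_samples = num_samples

    def __len__(self):
        return self.num_samples

    def __getitem__(self, idx):
        return {"input_ids": [idx], "labels": [idx]}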
@@ -8,11 +8,11 @@ from torch.nn.parameter import Parameter
 
 from colossalai.context import ParallelMode, seed
 from colossalai.core import global_context as gpc
+from colossalai.legacy.registry import LAYERS, LOSSES, MODELS
 from colossalai.nn.layer.base_layer import ParallelLayer
 from colossalai.nn.layer.parallel_1d._utils import gather_forward_split_backward, reduce_grad, reduce_input
 from colossalai.nn.layer.parallel_1d.layers import Linear1D_Row
 from colossalai.nn.layer.utils import divide
-from colossalai.registry import LAYERS, LOSSES, MODELS
 from colossalai.utils import get_current_device
 
 
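The same relocation covers the LAYERS, LOSSES, and MODELS registries. A minimal sketch of registering a custom loss under the new path, again assuming register_module behaves as it did before the move; ScaledMSELoss is a hypothetical example:

import torch.nn as nn

from colossalai.legacy.registry import LOSSES


@LOSSES.register_module
class ScaledMSELoss(nn.Module):
    # Hypothetical example loss: illustrates registration only.
    def __init__(self, scale: float = 1.0):
        super().__init__()
        self.scale = scale
        self.mse = nn.MSELoss()

    def forward(self, outputs, targets):
        return self.scale * self.mse(outputs, targets)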
@@ -10,9 +10,9 @@ import colossalai
 import colossalai.utils as utils
 from colossalai.context.parallel_mode import ParallelMode
 from colossalai.core import global_context as gpc
+from colossalai.legacy.trainer import Trainer, hooks
 from colossalai.logging import disable_existing_loggers, get_dist_logger
 from colossalai.nn import LinearWarmupLR
-from colossalai.trainer import Trainer, hooks
 from colossalai.utils import colo_set_process_memory_fraction, is_using_pp
 from colossalai.utils.timer import MultiTimer
 from colossalai.zero.legacy.init_ctx import ZeroInitContext
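Call sites only need the updated import; the training loop itself is untouched by this diff. A minimal sketch, assuming the pre-move Trainer/hooks API carries over unchanged under colossalai.legacy.trainer (engine, train_dataloader, and logger are placeholders produced earlier by colossalai.initialize and get_dist_logger):

from colossalai.legacy.trainer import Trainer, hooks

# engine, train_dataloader, and logger are assumed to exist from
# colossalai.initialize(...) and get_dist_logger() earlier in the script.
trainer = Trainer(engine=engine, logger=logger)
trainer.fit(
    train_dataloader=train_dataloader,
    epochs=10,  # placeholder epoch count
    hooks=[hooks.LossHook(), hooks.LogMetricByEpochHook(logger)],
    display_progress=True,
)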