Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-08-31 16:40:41 +00:00
[feat] refactored extension module (#5298)
* [feat] refactored extension module
* polish
* polish
* polish
* polish
* polish
* polish
* polish
* polish
* polish
* polish
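All three hunks below make the same substitution: the fused layer norm is no longer re-exported from `colossalai.kernel`, so each file imports `MixedFusedLayerNorm` from `colossalai.nn.layer.layernorm` and aliases it to `LayerNorm`, leaving every call site untouched. A minimal sketch of the pattern, assuming only the new module path shown in the diffs (the `torch.nn` fallback is a hypothetical guard for environments without ColossalAI installed):

```python
# Before (removed in this commit):
#   from colossalai.kernel import LayerNorm
#
# After: import the fused implementation directly and keep the old name,
# so existing call sites such as LayerNorm(hidden_size) stay unchanged.
try:
    from colossalai.nn.layer.layernorm import MixedFusedLayerNorm as LayerNorm
except ImportError:
    # Hypothetical fallback for environments without ColossalAI.
    from torch.nn import LayerNorm

norm = LayerNorm(768)  # same constructor call as before the refactor
```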
```diff
@@ -3,13 +3,13 @@ import inspect
 import torch
 import torch.nn as nn
 
-from colossalai.kernel import LayerNorm
 from colossalai.legacy.context import ParallelMode
 from colossalai.legacy.context.parallel_mode import ParallelMode
 from colossalai.legacy.core import global_context as gpc
 from colossalai.legacy.nn.layer.wrapper import PipelineSharedModuleWrapper
 from colossalai.legacy.pipeline.utils import partition_uniform
 from colossalai.logging import get_dist_logger
+from colossalai.nn.layer.layernorm import MixedFusedLayerNorm as LayerNorm
 
 from .layers import BertDualHead, BertLayer, Embedding, PreProcessor, VocabEmbedding
 from .layers.init_method import init_normal, output_init_normal
```
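Because the alias preserves the `LayerNorm` name, the model code below this import block keeps its constructor calls. A quick shape-contract check, using `torch.nn.LayerNorm` as a stand-in on the assumption that the fused version shares the `(normalized_shape, eps)` constructor:

```python
import torch
from torch import nn

# Stand-in for MixedFusedLayerNorm: same (normalized_shape, eps) constructor
# and same (batch, seq_len, hidden) -> (batch, seq_len, hidden) contract.
LayerNorm = nn.LayerNorm

hidden_size = 768
norm = LayerNorm(hidden_size, eps=1e-12)

x = torch.randn(2, 16, hidden_size)   # (batch, seq_len, hidden)
y = norm(x)
assert y.shape == x.shape             # normalization is shape-preserving
```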
```diff
@@ -3,9 +3,9 @@ import torch.nn as nn
 import torch.nn.functional as F
 from loss_func.cross_entropy import vocab_cross_entropy
 
-from colossalai.kernel import LayerNorm
 from colossalai.legacy.context import ParallelMode
 from colossalai.legacy.core import global_context as gpc
+from colossalai.nn.layer.layernorm import MixedFusedLayerNorm as LayerNorm
 
 from .linear import Linear
 from .pooler import Pooler
```
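Any remaining file that still uses the old path needs the same one-line swap. A throwaway migration script (entirely hypothetical, not part of this commit) that performs the textual rewrite; note the real hunks also keep the import block sorted, which a plain substitution does not:

```python
import pathlib

OLD = "from colossalai.kernel import LayerNorm"
NEW = "from colossalai.nn.layer.layernorm import MixedFusedLayerNorm as LayerNorm"

# Rewrite every .py file under the current tree that still uses the old import.
for path in pathlib.Path(".").rglob("*.py"):
    text = path.read_text()
    if OLD in text:
        path.write_text(text.replace(OLD, NEW))
        print(f"updated {path}")
```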
```diff
@@ -8,12 +8,12 @@ from lr_scheduler import AnnealingLR
 from model.bert import BertForPretrain, build_pipeline_bert
 
 import colossalai
-from colossalai.kernel import LayerNorm
 from colossalai.legacy.amp import AMP_TYPE
 from colossalai.legacy.context.parallel_mode import ParallelMode
 from colossalai.legacy.core import global_context as gpc
 from colossalai.legacy.utils import is_using_pp
 from colossalai.logging import get_dist_logger
+from colossalai.nn.layer.layernorm import MixedFusedLayerNorm as LayerNorm
 from colossalai.nn.optimizer import FusedAdam
 from colossalai.utils import MultiTimer
 
```
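The training-script hunk above changes only the LayerNorm import; `FusedAdam`, `MultiTimer`, and the AMP/pipeline imports are unchanged context. For orientation, a sketch of driving the imported `FusedAdam`, assuming it accepts the standard `torch.optim` constructor arguments (the `AdamW` fallback is a hypothetical guard for installs without ColossalAI; the fused kernels themselves are CUDA-only):

```python
import torch
from torch import nn

try:
    # Fused CUDA optimizer imported by the training script above.
    from colossalai.nn.optimizer import FusedAdam as Adam
except ImportError:
    # Hypothetical fallback for installs without ColossalAI.
    from torch.optim import AdamW as Adam

device = "cuda" if torch.cuda.is_available() else "cpu"
model = nn.Linear(768, 768).to(device)
optimizer = Adam(model.parameters(), lr=1e-4, weight_decay=0.01)

loss = model(torch.randn(4, 768, device=device)).pow(2).mean()
loss.backward()
optimizer.step()  # the fused path requires CUDA tensors here
optimizer.zero_grad()
```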