[hotfix] quick fixes to make legacy tutorials runnable (#5559)
Co-authored-by: Edenzzzz <wtan45@wisc.edu>
@@ -1,9 +1,9 @@
 import torch
 import torch.nn as nn
 
-from colossalai.kernel.cuda_native import LayerNorm
 from colossalai.kernel.jit import bias_dropout_add_fused_inference, bias_dropout_add_fused_train
 from colossalai.legacy.nn.layer.parallel_sequence import TransformerSelfAttentionRing
+from colossalai.nn.layer.layernorm import MixedFusedLayerNorm as LayerNorm
 
 from .dropout import get_bias_dropout_add
 from .mlp import TransformerMLP
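The first hunk replaces the import from colossalai.kernel.cuda_native, which no longer exists, with the fused LayerNorm from colossalai.nn.layer.layernorm, aliased so the rest of the module is untouched. For reference, a minimal usage sketch of the aliased class; the hidden size, eps, and tensor shape here are illustrative assumptions, not values from the tutorial:

    import torch
    from colossalai.nn.layer.layernorm import MixedFusedLayerNorm as LayerNorm

    # illustrative sizes only; the tutorial derives these from its config
    hidden_size = 1024
    norm = LayerNorm(hidden_size, eps=1e-5).cuda()  # drop-in replacement for torch.nn.LayerNorm
    x = torch.randn(4, 16, hidden_size, device="cuda")
    y = norm(x)  # normalizes over the last dimension, same semantics as torch.nn.LayerNorm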
@@ -48,7 +48,7 @@ def pipeline_data_process_func(stage_output, micro_batch_data):
 def main():
     # initialize
     parse_args()
-    colossalai.launch_from_torch(config="./config.py", seed=1234, backend="nccl")
+    colossalai.legacy.launch_from_torch(config="./config.py", seed=1234, backend="nccl")
 
     logger = get_dist_logger()
 
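The second hunk accounts for the 0.x-style launcher moving under the colossalai.legacy namespace. A minimal startup sketch under the same assumptions as the tutorial (a ./config.py readable by the legacy context, and a process group spawned by torchrun):

    import colossalai.legacy
    from colossalai.logging import get_dist_logger

    # mirrors the patched call above; the config path and seed come from the tutorial
    colossalai.legacy.launch_from_torch(config="./config.py", seed=1234, backend="nccl")
    logger = get_dist_logger()

Launched with, e.g., torchrun --nproc_per_node=4 train.py, where the script name and GPU count are placeholders.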
@@ -136,7 +136,7 @@ def main():
     logger.info(f"LR Scheduler is built with {warmup_steps} warmup steps and {gpc.config.DECAY_ITERS} decay steps")
 
     # # init
-    engine, *dummy = colossalai.initialize(model, optimizer, criterion, verbose=True)
+    engine, *dummy = colossalai.legacy.initialize(model, optimizer, criterion, verbose=True)
 
     # build timer
     timer = MultiTimer()
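The third hunk applies the same namespace move to initialize, which wraps the model, optimizer, and criterion into a legacy Engine. A sketch of how such an engine is typically driven, assuming the 0.x-style Engine API from the legacy tutorials; the dataloader is hypothetical:

    engine, *_ = colossalai.legacy.initialize(model, optimizer, criterion, verbose=True)

    engine.train()
    for data, label in train_dataloader:        # hypothetical dataloader built earlier in the script
        engine.zero_grad()
        output = engine(data)                   # forward through the wrapped model
        loss = engine.criterion(output, label)  # wrapped loss
        engine.backward(loss)                   # handles mixed precision / ZeRO if configured
        engine.step()                           # optimizer (and scheduler) step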