add interleaved pipeline, fix naive amp and update pipeline model initializer (#80)
@@ -8,7 +8,7 @@ import torch
 import torch.multiprocessing as mp
 import model
 
-from colossalai.builder import PipelineModelInitializer
+from colossalai.builder import build_pipeline_model_from_cfg
 from colossalai.communication import p2p as p2p_communication
 from colossalai.communication.utils import send_tensor_meta, recv_tensor_meta
 from colossalai.context.parallel_mode import ParallelMode
@@ -39,7 +39,7 @@ def run_schedule(rank, world_size):
         backend='nccl')
 
     # build model
-    model = PipelineModelInitializer(gpc.config.model, 1).initialize()
+    model = build_pipeline_model_from_cfg(gpc.config.model, 1)
     print_rank_0('model is created')
 
     train_dataset = CIFAR10(
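
For context, the change swaps the class-based initializer for a functional builder: the test now gets its pipeline-stage module from a single call instead of constructing an initializer object and calling initialize(). A minimal before/after sketch, assuming the same gpc.config.model dict used in the test above; the second argument (1) is taken from the diff and presumably selects one model chunk per stage (the interleaved-pipeline case would pass a larger value), and the gpc import shown here is an assumption about the surrounding setup:

    from colossalai.builder import build_pipeline_model_from_cfg
    from colossalai.core import global_context as gpc

    # Old API removed by this commit:
    # model = PipelineModelInitializer(gpc.config.model, 1).initialize()

    # New API: build the module(s) for the current pipeline stage directly
    # from the model config, with 1 chunk per stage.
    model = build_pipeline_model_from_cfg(gpc.config.model, 1)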