Mirror of https://github.com/hpcaitech/ColossalAI.git
added CI for unit testing (#69)
@@ -1,15 +1,15 @@
 #!/usr/bin/env python
 # -*- encoding: utf-8 -*-

+from functools import partial
+from pathlib import Path
+
 import pytest
 import torch
 import torch.multiprocessing as mp

 from colossalai import launch
 from colossalai.context.parallel_mode import ParallelMode
 from colossalai.core import global_context as gpc
-from functools import partial
-from pathlib import Path
-
 CONFIG_PATH = Path(__file__).parent.joinpath('configs/parallel_2d_init.py').absolute()

@@ -75,6 +75,7 @@ def init_2d(rank, world_size, backend, port, host):
     check_2d_parallel_rank(rank)
     check_pipeline_parallel_rank(rank)
     gpc.destroy()
     torch.cuda.empty_cache()


+@pytest.mark.cpu
@@ -86,7 +87,7 @@ def test_2d_init():
     test_fn = partial(init_2d,
                       world_size=world_size,
                       backend='gloo',
-                      port='29500',
+                      port='29900',
                       host='localhost'
                       )
     mp.spawn(test_fn, nprocs=world_size)
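
The hunks above touch the 2D parallel-context initialization test (init_2d / test_2d_init, driven by configs/parallel_2d_init.py). The harness pattern is identical across all three files in this commit: functools.partial pre-binds every argument except the rank, torch.multiprocessing.spawn then invokes the bound function once per worker process with the process index as the first positional argument, and the worker brings up the distributed context. A minimal, self-contained sketch of that pattern, rebuilt on plain torch.distributed rather than ColossalAI's launch (the name init_worker and the assert are illustrative, not from this commit):

    # Sketch of the spawn-and-launch test harness, assuming only torch.
    from functools import partial

    import torch.distributed as dist
    import torch.multiprocessing as mp


    def init_worker(rank, world_size, backend, port, host):
        # mp.spawn supplies the rank; everything else was bound via partial.
        dist.init_process_group(backend=backend,
                                init_method=f'tcp://{host}:{port}',
                                rank=rank,
                                world_size=world_size)
        assert dist.get_rank() == rank  # stand-in for the check_*_rank helpers
        dist.destroy_process_group()


    if __name__ == '__main__':
        world_size = 4
        test_fn = partial(init_worker,
                          world_size=world_size,
                          backend='gloo',  # TCP-based CPU backend; no GPU needed
                          port='29900',
                          host='localhost')
        mp.spawn(test_fn, nprocs=world_size)

Choosing the gloo backend is what makes these tests viable on a CPU-only CI runner: the workers rendezvous over TCP at host:port and never require NCCL or a GPU.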
@@ -5,6 +5,7 @@ from functools import partial
 from pathlib import Path

 import pytest
 import torch
 import torch.multiprocessing as mp
+
 from colossalai.context.parallel_mode import ParallelMode
@@ -98,6 +99,7 @@ def init_2halfd(rank, world_size, backend, port, host):
     check_tensor_parallel_rank(rank)
     check_2p5d_parallel_rank(rank)
     gpc.destroy()
     torch.cuda.empty_cache()


+@pytest.mark.cpu
@@ -109,7 +111,7 @@ def test_2halfd_init():
     test_fn = partial(init_2halfd,
                       world_size=world_size,
                       backend='gloo',
-                      port='29501',
+                      port='29901',
                       host='localhost'
                       )
     mp.spawn(test_fn, nprocs=world_size)
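
This second file is the 2.5D variant (init_2halfd / test_2halfd_init, with the check_2p5d_parallel_rank assertion), and it receives the same two changes: a @pytest.mark.cpu marker and a fresh port. The marker is presumably what the new CI job filters on (e.g. pytest -m cpu) so that only CPU-capable tests run on the hosted runner. Custom marks should be registered to keep pytest from flagging them as unknown; one way to do that, sketched here as an assumption rather than taken from this repository, is a conftest.py hook:

    # conftest.py - register the custom 'cpu' mark (a sketch; the repo may
    # instead declare it in pytest.ini or setup.cfg).
    def pytest_configure(config):
        config.addinivalue_line(
            'markers', 'cpu: tests that can run on a CPU-only CI runner')

With the mark registered, the CI can select these tests with pytest -m cpu and skip everything that needs GPUs.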
@@ -5,8 +5,10 @@ from functools import partial
 from pathlib import Path

 import pytest
 import torch
 import torch.multiprocessing as mp
+
+
 from colossalai.context.parallel_mode import ParallelMode
 from colossalai.core import global_context as gpc
 from colossalai.initialize import launch
@@ -90,6 +92,7 @@ def init_3d(rank, world_size, backend, port, host):
     check_data_parallel_rank(rank)
     check_pipeline_parallel_rank(rank)
     gpc.destroy()
     torch.cuda.empty_cache()


+@pytest.mark.cpu
@@ -101,7 +104,7 @@ def test_3d_init():
     test_fn = partial(init_3d,
                       world_size=world_size,
                       backend='gloo',
-                      port='29502',
+                      port='29902',
                       host='localhost'
                       )
     mp.spawn(test_fn, nprocs=world_size)
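
The third file is the 3D variant (init_3d / test_3d_init), completing the pattern: each of the three tests keeps its own rendezvous port, now 29900, 29901, and 29902 instead of 29500-29502. Distinct ports per file prevent address-in-use failures if test runs overlap or a socket lingers in TIME_WAIT, and moving off 29500 avoids torch.distributed's conventional default master port, which is more likely to be busy on a shared CI host. Where reproducible fixed ports are not required, an alternative is to ask the OS for an unused port at runtime; find_free_port below is a hypothetical helper, not part of this commit:

    import socket


    def find_free_port(host='localhost'):
        # Binding to port 0 lets the kernel pick an unused port; read it back
        # and close the socket so the distributed launcher can bind it next.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.bind((host, 0))
            return s.getsockname()[1]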