[test] fixed rerun_on_exception and adapted test cases (#487)

Frank Lee
2022-03-25 17:25:12 +08:00
committed by GitHub
parent 4d322b79da
commit 3601b2bad0
31 changed files with 143 additions and 135 deletions


@@ -12,29 +12,26 @@ import torch.multiprocessing as mp
 from torch.utils.data import DataLoader
 import colossalai
-from colossalai.builder import build_dataset, build_data_sampler, build_transform
+from colossalai.builder import build_dataset, build_transform
 from torchvision import transforms
 from colossalai.context import ParallelMode, Config
 from colossalai.core import global_context as gpc
-from colossalai.utils import get_dataloader
+from colossalai.utils import get_dataloader, free_port
+from colossalai.testing import rerun_on_exception


 CONFIG = Config(
     dict(
-        train_data=dict(
-            dataset=dict(
-                type='CIFAR10',
-                root=Path(os.environ['DATA']),
-                train=True,
-                download=True,
-            ),
-            dataloader=dict(
-                batch_size=8,
-            ),
-            transform_pipeline=[
-                dict(type='ToTensor'),
-                dict(type='Normalize', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))
-            ]
-        ),
+        train_data=dict(dataset=dict(
+            type='CIFAR10',
+            root=Path(os.environ['DATA']),
+            train=True,
+            download=True,
+        ),
+                        dataloader=dict(batch_size=8,),
+                        transform_pipeline=[
+                            dict(type='ToTensor'),
+                            dict(type='Normalize', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))
+                        ]),
         parallel=dict(
             pipeline=dict(size=1),
             tensor=dict(size=1, mode=None),
@@ -43,15 +40,8 @@ CONFIG = Config(
 ))


-def run_data_sampler(rank, world_size):
-    dist_args = dict(
-        config=CONFIG,
-        rank=rank,
-        world_size=world_size,
-        backend='gloo',
-        port='29903',
-        host='localhost'
-    )
+def run_data_sampler(rank, world_size, port):
+    dist_args = dict(config=CONFIG, rank=rank, world_size=world_size, backend='gloo', port=port, host='localhost')
     colossalai.launch(**dist_args)
     print('finished initialization')

@@ -71,15 +61,16 @@ def run_data_sampler(rank, world_size):
     dist.broadcast(img_to_compare, src=0, group=gpc.get_group(ParallelMode.DATA))

     if gpc.get_local_rank(ParallelMode.DATA) != 0:
-        assert not torch.equal(img,
-                               img_to_compare), 'Same image was distributed across ranks but expected it to be different'
+        assert not torch.equal(
+            img, img_to_compare), 'Same image was distributed across ranks but expected it to be different'

     torch.cuda.empty_cache()


 @pytest.mark.cpu
+@rerun_on_exception(exception_type=mp.ProcessRaisedException, pattern=".*Address already in use.*")
 def test_data_sampler():
     world_size = 4
-    test_func = partial(run_data_sampler, world_size=world_size)
+    test_func = partial(run_data_sampler, world_size=world_size, port=free_port())
     mp.spawn(test_func, nprocs=world_size)

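Why this change works: the old test hard-coded port='29903', so any stale process or concurrent test run holding that port made the suite fail with "Address already in use". The new version asks the OS for a free port at spawn time, and because another process can still grab that port between allocation and colossalai.launch binding it, the test is additionally wrapped in rerun_on_exception so a collision triggers a retry rather than a failure.

A minimal sketch of the port helper, assuming the conventional bind-to-port-0 approach (the real colossalai.utils.free_port may differ in detail):

import socket

def free_port() -> int:
    # Bind to port 0 so the OS assigns an unused ephemeral port, then
    # close the socket and return the port number. Note the port can be
    # reclaimed by another process before the caller rebinds it, which
    # is exactly why the retry decorator is still needed on top.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.bind(('localhost', 0))
        return sock.getsockname()[1]

And a sketch of the decorator's contract: rerun the wrapped test when the raised exception matches both the given type and the message pattern, and let everything else fail normally. The names and defaults here (max_try, the re.search check) are assumptions, not the exact colossalai.testing implementation:

import re
from functools import wraps

def rerun_on_exception(exception_type=Exception, pattern=None, max_try=5):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(max_try):
                try:
                    return func(*args, **kwargs)
                except exception_type as e:
                    # Unrelated errors should surface immediately.
                    if pattern is not None and not re.search(pattern, str(e)):
                        raise
                    # Retries exhausted: re-raise the last matching error.
                    if attempt == max_try - 1:
                        raise
        return wrapper
    return decorator

Applied as in the diff, @rerun_on_exception(exception_type=mp.ProcessRaisedException, pattern=".*Address already in use.*") reruns test_data_sampler only when torch.multiprocessing reports a port collision from one of the spawned workers.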