Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-09 04:50:17 +00:00
[test] refactor tests with spawn (#3452)
* [test] added spawn decorator
* polish code
* polish code
* polish code
* polish code
* polish code
* polish code
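The commit title refers to a new spawn helper in colossalai.testing introduced by this PR. It does not appear in the hunk below, where this single-process test simply drops multiprocessing, but a minimal sketch of what such a helper could look like follows, assuming it wraps torch.multiprocessing.spawn (the name and signature here are assumptions, not shown in this diff):

    # Hypothetical sketch only: the actual colossalai.testing.spawn helper is
    # not shown in this hunk, and its real signature may differ.
    from functools import partial

    import torch.multiprocessing as mp


    def spawn(func, nprocs=1, **kwargs):
        # Bind any extra keyword arguments, then launch one process per rank;
        # mp.spawn passes the rank as the first positional argument.
        wrapped = partial(func, **kwargs)
        mp.spawn(wrapped, nprocs=nprocs)

With such a helper, the boilerplate removed below (a partial(run_dist) wrapper plus an explicit mp.spawn call) would collapse to a single spawn(run_dist, 1) call in tests that still need multiple processes.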
@@ -1,27 +1,14 @@
-from functools import partial
-
-import torch.multiprocessing as mp
 import torch.nn as nn
 
 from colossalai.booster.accelerator import Accelerator
-from colossalai.testing import parameterize, rerun_if_address_is_in_use
+from colossalai.testing import clear_cache_before_run, parameterize
 
 
+@clear_cache_before_run()
 @parameterize('device', ['cpu', 'cuda'])
-def run_accelerator(device):
+def test_accelerator(device):
     acceleartor = Accelerator(device)
     model = nn.Linear(8, 8)
     model = acceleartor.configure_model(model)
     assert next(model.parameters()).device.type == device
-
-
-def run_dist(rank):
-    run_accelerator()
-
-
-@rerun_if_address_is_in_use()
-def test_accelerator():
-    world_size = 1
-    run_func = partial(run_dist)
-    mp.spawn(run_func, nprocs=world_size)
+    del model, acceleartor
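For reference, applying the hunk yields the following test file; the accelerator check is now a plain single-process pytest function, decorated to clear caches before running and parameterized over devices (the acceleartor spelling is kept verbatim from the source):

    import torch.nn as nn

    from colossalai.booster.accelerator import Accelerator
    from colossalai.testing import clear_cache_before_run, parameterize


    @clear_cache_before_run()
    @parameterize('device', ['cpu', 'cuda'])
    def test_accelerator(device):
        acceleartor = Accelerator(device)
        model = nn.Linear(8, 8)
        model = acceleartor.configure_model(model)
        assert next(model.parameters()).device.type == device
        del model, acceleartor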