[test] refactor tests with spawn (#3452)

* [test] added spawn decorator

* polish code

* polish code

* polish code

* polish code

* polish code

* polish code
Frank Lee authored on 2023-04-06 14:51:35 +08:00, committed by GitHub
parent 62f4e2eb07
commit 80eba05b0a
240 changed files with 1723 additions and 2342 deletions
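The refactor named in the PR title and in the "[test] added spawn decorator" bullet centers on a spawn helper in colossalai.testing that launches a test body across multiple worker processes. A minimal sketch of that pattern is given below; the run_dist helper, its launch arguments, and the worker count are illustrative assumptions, not lines taken from this commit's diff.

import pytest

import colossalai
from colossalai.testing import rerun_if_address_is_in_use, spawn


def run_dist(rank, world_size, port):
    # Hypothetical worker body: each spawned process initializes the
    # distributed environment, then runs the actual assertions.
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port)
    # ... distributed test assertions would go here ...


@pytest.mark.dist
@rerun_if_address_is_in_use()
def test_something_distributed():
    # spawn is assumed to start `nprocs` workers and pass each one its rank,
    # the world size, and a free port, replacing hand-written
    # torch.multiprocessing.spawn boilerplate in every test file.
    spawn(run_dist, nprocs=2)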


@@ -1,7 +1,9 @@
 import pytest
 import torch
-from tests.components_to_test.registry import non_distributed_component_funcs
 from colossalai.nn.optimizer import CPUAdam, HybridAdam
+from colossalai.testing import clear_cache_before_run, parameterize
+from tests.components_to_test.registry import non_distributed_component_funcs


 def move_some_params_to_cuda(model, torch_model):
@@ -16,9 +18,10 @@ def check_params_equal(model, torch_model):
         assert torch.allclose(p, torch_p, atol=1e-3), f'diff: {torch.abs(p - torch_p)}'


-@pytest.mark.parametrize('nvme_offload_fraction', [0.0, 0.5, 1.0])
-@pytest.mark.parametrize('nvme_offload_dir', ['./offload', None])
-@pytest.mark.parametrize('adam_cls', [CPUAdam, HybridAdam])
+@clear_cache_before_run()
+@parameterize('nvme_offload_fraction', [0.0, 0.5, 1.0])
+@parameterize('nvme_offload_dir', ['./offload', None])
+@parameterize('adam_cls', [CPUAdam, HybridAdam])
 def test_nvme_adam(nvme_offload_fraction, nvme_offload_dir, adam_cls):
     get_components_func = non_distributed_component_funcs.get_callable('simple_net')
     model_builder, train_dataloader, test_dataloader, optimizer_class, criterion = get_components_func()
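The hunk above swaps pytest.mark.parametrize for colossalai.testing.parameterize (plus clear_cache_before_run, which, as its name suggests, clears cached state before the test body runs). Unlike pytest's collection-time parametrization, parameterize behaves as an ordinary decorator that invokes the wrapped function once per argument value when it is called, so it also works on helpers executed inside spawned worker processes. The snippet below is a simplified, illustrative re-implementation of that idea, not the library's actual code.

import functools


def parameterize(arg_name, values):
    # Illustrative call-time parametrization: run the wrapped function once
    # per value when it is called, instead of expanding into separate
    # pytest test items at collection time.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for value in values:
                func(*args, **{**kwargs, arg_name: value})
        return wrapper
    return decorator


@parameterize('nvme_offload_fraction', [0.0, 0.5, 1.0])
@parameterize('adam_cls', ['CPUAdam', 'HybridAdam'])
def check_adam(adam_cls, nvme_offload_fraction):
    print(adam_cls, nvme_offload_fraction)  # executes 3 x 2 = 6 times per call


check_adam()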