Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2026-05-12 17:53:03 +00:00
[test] refactored with the new rerun decorator (#763)
* [test] refactored with the new rerun decorator
* polish test case
@@ -6,6 +6,7 @@ from colossalai.zero.sharded_param import (StatefulTensor, colo_tensor_mem_usage
                                            colo_model_data_tensor_move_inline, colo_model_data_move_to_cpu,
                                            colo_model_tensor_clone)
 from colossalai.utils import free_port
+from colossalai.testing import rerun_if_address_is_in_use

 import torch


@@ -84,6 +85,7 @@ def run_dist(rank, world_size, port):


 @pytest.mark.dist
 @pytest.mark.parametrize("world_size", [4, 5])
+@rerun_if_address_is_in_use()
 def test_zero_tensor_utils(world_size):
     run_func = partial(run_dist, world_size=world_size, port=free_port())
     mp.spawn(run_func, nprocs=world_size)

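For readers new to this pattern: distributed tests that bind a rendezvous port can fail with "address already in use" when processes from a previous run have not released the port yet. Below is a minimal, hypothetical sketch of what such a rerun decorator can look like; it illustrates only the retry idea and is not ColossalAI's actual implementation of colossalai.testing.rerun_if_address_is_in_use:

import functools

def rerun_if_address_is_in_use(max_try: int = 5):
    # Hypothetical sketch of the retry idea; not ColossalAI's real code.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            last_exc = None
            for _ in range(max_try):
                try:
                    return func(*args, **kwargs)
                except Exception as exc:
                    if "address already in use" not in str(exc).lower():
                        raise  # unrelated failure: do not retry
                    last_exc = exc  # port clash: run the test again
            raise last_exc  # still clashing after max_try attempts
        return wrapper
    return decorator

Because test_zero_tensor_utils calls free_port() inside its body, each rerun rebuilds run_func with a freshly allocated port, so a clash caused by a lingering process from the previous attempt resolves itself on the next try.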