mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-06-26 07:22:12 +00:00
fix
This commit is contained in:
parent
b38d45ee51
commit
c0811d7342
@ -26,7 +26,7 @@ def check_layer(rank, world_size, port):
|
||||
dist.all_reduce(tensor, op=ReduceOp.SUM, group=pg)
|
||||
assert tensor.equal(tensor_to_check)
|
||||
|
||||
|
||||
@pytest.mark.skip("tested in corresponding sharderformer")
|
||||
@pytest.mark.dist
|
||||
@rerun_if_address_is_in_use()
|
||||
def test_logical_pg():
|
||||
|
@ -1,4 +1,3 @@
|
||||
import pytest
|
||||
import torch
|
||||
import torch.distributed as dist
|
||||
from torch.distributed.distributed_c10d import _get_default_group
|
||||
@ -37,7 +36,6 @@ def run_dist(rank, world_size, port):
|
||||
check_4gpu()
|
||||
|
||||
|
||||
@pytest.mark.skip("tested in corresponding sharderformer")
|
||||
@rerun_if_address_is_in_use()
|
||||
def test_all_gather():
|
||||
spawn(run_dist, 4)
|
||||
|
Loading…
Reference in New Issue
Block a user