Mirror of https://github.com/hpcaitech/ColossalAI.git
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
commit dc60efe154 (parent fd69a821bb)
@@ -1,3 +1,4 @@
+import pytest
 import torch
 from torch.optim import Adam
 
@@ -5,7 +6,6 @@ import colossalai
 from colossalai.booster.mixed_precision import FP16TorchMixedPrecision
 from colossalai.testing import rerun_if_address_is_in_use, spawn
 from tests.kit.model_zoo import model_zoo
-import pytest
 
 
 def run_torch_amp(rank, world_size, port):
@@ -35,6 +35,7 @@ def run_torch_amp(rank, world_size, port):
     optimizer.step()
     del model, optimizer, criterion, data, output, mixed_precision
 
+
 @pytest.mark.skip(reason="Skip because assertion may fail for CI devices")
 @rerun_if_address_is_in_use()
 def test_torch_ddp_plugin():
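For context, the decorated test in the last hunk normally does little more than fan the worker function from the first hunk out across processes. Below is a minimal sketch of that wiring, assuming colossalai.testing.spawn follows the spawn(func, nprocs) call shape and forwards rank, world_size, and a free port to the worker; the worker body and the real process count are outside the hunks shown here and are omitted.

import pytest

from colossalai.testing import rerun_if_address_is_in_use, spawn


def run_torch_amp(rank, world_size, port):
    # Worker body (model setup, FP16TorchMixedPrecision, forward/backward,
    # optimizer.step(), cleanup) lives in the unchanged part of the file.
    ...


@pytest.mark.skip(reason="Skip because assertion may fail for CI devices")
@rerun_if_address_is_in_use()
def test_torch_ddp_plugin():
    # Assumed single-process launch; the real test may use a different nprocs.
    spawn(run_torch_amp, 1)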