mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-06-23 22:19:47 +00:00
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
This commit is contained in:
parent
fd69a821bb
commit
dc60efe154
@ -1,3 +1,4 @@
|
||||
import pytest
|
||||
import torch
|
||||
from torch.optim import Adam
|
||||
|
||||
@ -5,7 +6,6 @@ import colossalai
|
||||
from colossalai.booster.mixed_precision import FP16TorchMixedPrecision
|
||||
from colossalai.testing import rerun_if_address_is_in_use, spawn
|
||||
from tests.kit.model_zoo import model_zoo
|
||||
import pytest
|
||||
|
||||
|
||||
def run_torch_amp(rank, world_size, port):
|
||||
@ -35,6 +35,7 @@ def run_torch_amp(rank, world_size, port):
|
||||
optimizer.step()
|
||||
del model, optimizer, criterion, data, output, mixed_precision
|
||||
|
||||
|
||||
@pytest.mark.skip(reason="Skip because assertion may fail for CI devices")
|
||||
@rerun_if_address_is_in_use()
|
||||
def test_torch_ddp_plugin():
|
||||
|
Loading…
Reference in New Issue
Block a user