From dc60efe1545b4eb9fa84ba2816d45af499f22b40 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 11 Apr 2025 03:22:25 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 tests/test_booster/test_mixed_precision/test_fp16_torch.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/test_booster/test_mixed_precision/test_fp16_torch.py b/tests/test_booster/test_mixed_precision/test_fp16_torch.py
index 09ec1b88f..1d4a5c0d8 100644
--- a/tests/test_booster/test_mixed_precision/test_fp16_torch.py
+++ b/tests/test_booster/test_mixed_precision/test_fp16_torch.py
@@ -1,3 +1,4 @@
+import pytest
 import torch
 from torch.optim import Adam
 
@@ -5,7 +6,6 @@ import colossalai
 from colossalai.booster.mixed_precision import FP16TorchMixedPrecision
 from colossalai.testing import rerun_if_address_is_in_use, spawn
 from tests.kit.model_zoo import model_zoo
-import pytest
 
 
 def run_torch_amp(rank, world_size, port):
@@ -35,6 +35,7 @@ def run_torch_amp(rank, world_size, port):
     optimizer.step()
     del model, optimizer, criterion, data, output, mixed_precision
 
+
 @pytest.mark.skip(reason="Skip because assertion may fail for CI devices")
 @rerun_if_address_is_in_use()
 def test_torch_ddp_plugin():
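
For reference, a minimal, hypothetical sketch (the file and test names below are not from this repo) of what the @pytest.mark.skip marker shown as context in the last hunk does: pytest still collects the decorated test, but it records it as skipped with the stated reason instead of executing its body, so the CI-sensitive assertion never runs.

# test_skip_sketch.py -- hypothetical example, not part of the patch
import pytest


@pytest.mark.skip(reason="Skip because assertion may fail for CI devices")
def test_always_skipped():
    # pytest collects this test but never executes the body;
    # running `pytest -rs test_skip_sketch.py` reports it as SKIPPED
    # together with the reason string above.
    assert False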