[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
pre-commit-ci[bot] 2025-04-10 04:58:49 +00:00
parent 5c56a7fd7b
commit 6997862a91
6 changed files with 7 additions and 7 deletions

View File

@@ -6,7 +6,7 @@ from torch.testing import assert_close
 from colossalai import launch
 from colossalai.accelerator import get_accelerator
 from colossalai.quantization.fp8 import all_to_all_single_fp8
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn, clear_cache_before_run
+from colossalai.testing import clear_cache_before_run, parameterize, rerun_if_address_is_in_use, spawn
 @clear_cache_before_run()

View File

@@ -6,7 +6,7 @@ from torch.testing import assert_close
 from colossalai import launch
 from colossalai.accelerator import get_accelerator
 from colossalai.quantization.fp8 import _all_to_all_fp8
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn, clear_cache_before_run
+from colossalai.testing import clear_cache_before_run, parameterize, rerun_if_address_is_in_use, spawn
 @clear_cache_before_run()

View File

@@ -6,7 +6,7 @@ from torch.testing import assert_close
 from colossalai import launch
 from colossalai.accelerator import get_accelerator
 from colossalai.quantization.fp8 import all_to_all_single_fp8
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn, clear_cache_before_run
+from colossalai.testing import clear_cache_before_run, parameterize, rerun_if_address_is_in_use, spawn
 dist.all_to_all_single

View File

@@ -3,8 +3,7 @@ from torch.testing import assert_close
 from colossalai.accelerator import get_accelerator
 from colossalai.quantization.fp8 import cast_from_fp8, cast_from_fp8_pipeline, cast_to_fp8, cast_to_fp8_pipeline
-from colossalai.testing import parameterize, clear_cache_before_run
+from colossalai.testing import clear_cache_before_run, parameterize
 @clear_cache_before_run()

View File

@@ -8,7 +8,7 @@ from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
 from torch.testing import assert_close
 from colossalai import launch
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn, clear_cache_before_run
+from colossalai.testing import clear_cache_before_run, parameterize, rerun_if_address_is_in_use, spawn
 # example modified from https://pytorch.org/tutorials/intermediate/ddp_tutorial.html
@@ -27,6 +27,7 @@ class ToyModel(nn.Module):
     def forward(self, x):
         return self.net2(self.relu(self.net1(x)))
+@clear_cache_before_run()
 @parameterize("mode", ["grad", "params"])
 def run_model(mode):
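For context, after this commit the decorated test in this file reads as below; a minimal sketch, with the function body elided as it is in the diff:

from colossalai.testing import clear_cache_before_run, parameterize

@clear_cache_before_run()  # added by this commit
@parameterize("mode", ["grad", "params"])
def run_model(mode):
    ...  # body unchanged, not shown in the diff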

View File

@@ -6,7 +6,7 @@ from torch.testing import assert_close
 from colossalai import launch
 from colossalai.accelerator import get_accelerator
 from colossalai.quantization.fp8 import reduce_scatter_fp8
-from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn, clear_cache_before_run
+from colossalai.testing import clear_cache_before_run, parameterize, rerun_if_address_is_in_use, spawn
 @clear_cache_before_run()
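All six import changes are consistent with an import-sorting hook (isort-style) alphabetizing the names within each from-import; a minimal sketch of that ordering, assuming a plain alphabetical sort (all names here are lowercase, so case handling does not matter):

# An import sorter orders the names inside a single "from ... import ..."
# alphabetically, which is why clear_cache_before_run moves to the front
# of every changed import line in this commit.
names = ["parameterize", "rerun_if_address_is_in_use", "spawn", "clear_cache_before_run"]
print(", ".join(sorted(names)))
# -> clear_cache_before_run, parameterize, rerun_if_address_is_in_use, spawn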