[misc] update pre-commit and run all files (#4752)

* [misc] update pre-commit

* [misc] run pre-commit

* [misc] remove useless configuration files

* [misc] ignore cuda for clang-format
Author: Hongxin Liu
Date: 2023-09-19 14:20:26 +08:00
Committer: GitHub
Parent: 3c6b831c26
Commit: 079bf3cb26

1268 changed files with 50037 additions and 38444 deletions
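
Three of the four messages above describe changes to the repository's pre-commit configuration (.pre-commit-config.yaml). A minimal sketch of the kind of configuration they imply, assuming a black hook for Python formatting and a clang-format hook that excludes CUDA sources; the hook revisions and the exact exclude pattern are illustrative assumptions, not the project's actual entries:

repos:
  - repo: https://github.com/psf/black       # Python formatter; enforces double quotes
    rev: 23.9.1                              # assumed pin, not the commit's actual version
    hooks:
      - id: black
  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v16.0.6                             # assumed pin
    hooks:
      - id: clang-format
        exclude: \.cu$                       # "ignore cuda for clang-format": skip CUDA sources

With hooks like these installed, pre-commit checks staged files at commit time, and `pre-commit run --all-files` applies them to the entire tree, which is consistent with the "run pre-commit" message and the repository-wide churn recorded above.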

@@ -10,7 +10,7 @@ from colossalai.shardformer.layer import FusedLayerNorm
 from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn
-@parameterize('lazy_init', [False, True])
+@parameterize("lazy_init", [False, True])
 def check_layernorm(lazy_init: bool):
     ctx = LazyInitContext() if lazy_init else nullcontext()
@@ -41,7 +41,7 @@ def check_layernorm(lazy_init: bool):
 def run_dist(rank, world_size, port):
-    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
+    colossalai.launch(config={}, rank=rank, world_size=world_size, host="localhost", port=port, backend="nccl")
     check_layernorm()
@@ -50,5 +50,5 @@ def test_layernorm():
     spawn(run_dist, nprocs=2)
-if __name__ == '__main__':
+if __name__ == "__main__":
     test_layernorm()
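
The single-quote to double-quote rewrites in the hunks above are the kind of mechanical change black makes; the test's behavior is untouched. The surrounding harness spawns two worker processes via spawn(run_dist, nprocs=2), each of which initializes a distributed NCCL backend through colossalai.launch and then runs the layer-norm check.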