Mirror of https://github.com/hpcaitech/ColossalAI.git
[misc] update pre-commit and run all files (#4752)
* [misc] update pre-commit
* [misc] run pre-commit
* [misc] remove useless configuration files
* [misc] ignore cuda for clang-format
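Concretely, the second item corresponds to running the hooks over the whole tree (pre-commit run --all-files), which produces mechanical diffs like the representative hunks below, dominated by quote-style normalization.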
@@ -14,10 +14,9 @@ from colossalai.testing import clear_cache_before_run, parameterize, rerun_if_ad
 from tests.kit.model_zoo import model_zoo
 
 
-@parameterize('lazy_init', [True, False])
+@parameterize("lazy_init", [True, False])
 def check_shardformer_with_ddp(lazy_init: bool):
-    sub_model_zoo = model_zoo.get_sub_registry('transformers_gpt')
+    sub_model_zoo = model_zoo.get_sub_registry("transformers_gpt")
 
-
     # create shardformer
     # ranks: [0, 1, 2, 3]
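The only substantive change in this hunk is quote style; the rest is context. For readers unfamiliar with the decorator, the sketch below is a minimal, illustrative parameterize-style decorator showing the general pattern: the wrapped test is called once per value. It is not colossalai.testing.parameterize itself, and the check function is a stand-in.

# Illustrative sketch only, not ColossalAI's implementation.
import functools

def parameterize(arg_name, values):
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            # Call the wrapped function once per parameter value.
            for value in values:
                fn(*args, **{**kwargs, arg_name: value})
        return wrapper
    return decorator

@parameterize("lazy_init", [True, False])
def check(lazy_init: bool):
    print(f"lazy_init={lazy_init}")

check()  # runs twice: lazy_init=True, then lazy_init=False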
@@ -72,7 +71,7 @@ def check_shardformer_with_ddp(lazy_init: bool):
 
 
 def run_dist(rank, world_size, port):
     disable_existing_loggers()
-    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
+    colossalai.launch(config={}, rank=rank, world_size=world_size, host="localhost", port=port, backend="nccl")
     check_shardformer_with_ddp()
 