[misc] refactor launch API and tensor constructor (#5666)

* [misc] remove config arg from initialize

* [misc] remove old tensor constructor

* [plugin] add npu support for ddp

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* [devops] fix doc test ci

* [test] fix test launch

* [doc] update launch doc
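
In short, call sites simply drop the now-removed `config` argument. A minimal before/after sketch of the launch call, based on the call sites updated in the diffs below (the rank, world_size, and port values here are illustrative; the tests pass them through from the test runner):

    import colossalai

    # Before this change: launch() took a config dict, typically empty.
    # colossalai.launch(config={}, rank=rank, world_size=world_size, port=port, host="localhost")

    # After this change: the config argument is removed.
    colossalai.launch(rank=0, world_size=1, port=29500, host="localhost")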

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Author: Hongxin Liu
Date: 2024-04-29 10:40:11 +08:00
Committed by: GitHub
Parent: 91fa553775
Commit: 7f8b16635b
223 changed files with 294 additions and 403 deletions


@@ -71,7 +71,7 @@ def check_p2p_communication():
 def run_dist(rank, world_size, port):
-    colossalai.launch(config={}, rank=rank, world_size=world_size, port=port, host="localhost")
+    colossalai.launch(rank=rank, world_size=world_size, port=port, host="localhost")
     check_p2p_communication()


@@ -58,7 +58,7 @@ def run_pp(
     This test is to examine the correctness of interleaved 1F1B, compared with torch.
     Be aware it contains some hardcodes.
     """
-    colossalai.launch(config=dict(), rank=rank, world_size=world_size, port=port, host="localhost")
+    colossalai.launch(rank=rank, world_size=world_size, port=port, host="localhost")
     # create model
     seed_all(1453)


@@ -148,7 +148,7 @@ def run_dist(
     num_microbatch: int,
     batch_size: int,
 ):
-    colossalai.launch(config=dict(), rank=rank, world_size=world_size, port=port, host="localhost")
+    colossalai.launch(rank=rank, world_size=world_size, port=port, host="localhost")
     examine_pp(num_microbatch, batch_size)


@@ -64,7 +64,7 @@ def check_stage_manager():
 def run_dist(rank, world_size, port):
-    colossalai.launch(config={}, rank=rank, world_size=world_size, port=port, host="localhost")
+    colossalai.launch(rank=rank, world_size=world_size, port=port, host="localhost")
     check_stage_manager()