Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-06 11:32:10 +00:00)
[misc] refactor launch API and tensor constructor (#5666)
* [misc] remove config arg from initialize
* [misc] remove old tensor constructor
* [plugin] add npu support for ddp
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* [devops] fix doc test ci
* [test] fix test launch
* [doc] update launch doc

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
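In practice, the change removes the `config` argument from both launch entry points. Below is a minimal sketch of the updated usage under torchrun, based only on the signatures visible in the diff further down; the env handling and print line are illustrative, not part of the commit:

    import os

    import colossalai
    import torch.distributed as dist

    # After this change, launch_from_torch() takes no config dict; rank,
    # world size, and rendezvous info come from torchrun's env variables.
    colossalai.launch_from_torch()

    local_rank = int(os.environ["LOCAL_RANK"])
    print(f"launched, world size: {dist.get_world_size()}, local rank: {local_rank}")

Such a script would be started with, for example, `torchrun --nproc_per_node=2 train.py`, which sets RANK, WORLD_SIZE, and LOCAL_RANK for each process.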
@@ -35,12 +35,12 @@ def main():
     if args.vscode_debug:
         colossalai.launch(
-            config={}, rank=args.rank, world_size=args.world_size, host=args.host, port=args.port, backend=args.backend
+            rank=args.rank, world_size=args.world_size, host=args.host, port=args.port, backend=args.backend
         )
         args.local_rank = -1
         args.log_interval = 1
     else:
-        colossalai.launch_from_torch(config={})  # args.colossal_config
+        colossalai.launch_from_torch()  # args.colossal_config
         args.local_rank = int(os.environ["LOCAL_RANK"])
         logger.info(
             f"launch_from_torch, world size: {torch.distributed.get_world_size()} | "
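For the manual debug path, the same call now reads without `config`. A hedged sketch with illustrative single-process values (the host and port below are placeholders, not taken from the commit):

    import colossalai

    # Single-process launch for debugging; all argument values here are
    # illustrative, matching the refactored signature shown in the diff above.
    colossalai.launch(
        rank=0,
        world_size=1,
        host="127.0.0.1",
        port=29500,
        backend="nccl",
    )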