Mirror of https://github.com/hpcaitech/ColossalAI.git
[npu] change device to accelerator api (#5239)
* update accelerator
* fix timer
* fix amp
* update
* fix
* update bug
* add error raise
* fix autocast
* fix set device
* remove doc accelerator
* update doc
* update doc
* update doc
* use nullcontext
* update cpu
* update null context
* change time limit for example
* update
* update
* update
* update
* [npu] polish accelerator code

---------

Co-authored-by: Xuanlei Zhao <xuanlei.zhao@gmail.com>
Co-authored-by: zxl <43881818+oahzxl@users.noreply.github.com>
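For readers unfamiliar with the API being migrated: the commit swaps the device helper in colossalai.utils for the backend-neutral accelerator interface, so the same code can resolve its device on NPU as well as GPU. A minimal before/after sketch; only the two calls visible in the diffs below are taken from the source, and the tensor construction around them is illustrative:

import torch

from colossalai.accelerator import get_accelerator

# Old pattern, removed by this commit:
#   from colossalai.utils import get_current_device
#   x = torch.empty(8, device=get_current_device())

# New pattern: get_accelerator() resolves the active backend and
# get_current_device() returns that backend's current torch device.
x = torch.empty(8, device=get_accelerator().get_current_device())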
@@ -5,13 +5,13 @@ import torch
 from torch.utils._pytree import tree_map
 
 import colossalai
+from colossalai.accelerator import get_accelerator
 from colossalai.auto_parallel.offload.amp_optimizer import AMPOptimizer
 from colossalai.auto_parallel.offload.mem_optimize import memory_optimize
 from colossalai.auto_parallel.offload.solver import NOT_NVML
 from colossalai.fx.profiler import parameter_size
 from colossalai.nn.optimizer import HybridAdam
 from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn
-from colossalai.utils import get_current_device
 from colossalai.zero import ColoInitContext, zero_model_wrapper, zero_optim_wrapper
 from tests.test_auto_parallel.test_offload.model_utils import *
 from tests.test_tensor.common_utils import set_seed
@@ -31,7 +31,7 @@ def exam_fwd_bwd(model_name: str, memory_budget: float, solver_name: str):
             64,
             8,
         ),
-        device=get_current_device(),
+        device=get_accelerator().get_current_device(),
     )
     criterion = LMLoss()
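The hunk above shows only the tail of the call that builds the test input, so the following is a hypothetical reconstruction of the pattern it patches; the helper name get_data and the randint arguments are assumptions, while the device keyword matches the diff:

import torch

from colossalai.accelerator import get_accelerator


def get_data(vocab_size: int, seq_len: int, batch_size: int) -> torch.Tensor:
    # Hypothetical stand-in for the test's input construction: a random
    # token batch allocated directly on the active accelerator's device.
    return torch.randint(
        vocab_size,
        (
            seq_len,
            batch_size,
        ),
        device=get_accelerator().get_current_device(),
    )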
@@ -10,12 +10,12 @@ try:
 except:
     NO_CODEGEN = True
 
+from colossalai.accelerator import get_accelerator
 from colossalai.device.device_mesh import DeviceMesh
 from colossalai.initialize import launch
 from colossalai.logging import disable_existing_loggers
 from colossalai.nn.optimizer import HybridAdam
 from colossalai.testing import assert_close, rerun_if_address_is_in_use, run_on_environment_flag, spawn
-from colossalai.utils import get_current_device
 from colossalai.zero import zero_model_wrapper, zero_optim_wrapper
@@ -72,7 +72,11 @@ def check_auto_parallel_with_gemini(rank, world_size, port):
     print("=" * msg_length)
 
     gemini_config = dict(
-        strict_ddp_mode=False, device=get_current_device(), placement_policy="cpu", pin_memory=True, search_range_m=128
+        strict_ddp_mode=False,
+        device=get_accelerator().get_current_device(),
+        placement_policy="cpu",
+        pin_memory=True,
+        search_range_m=128,
     )
 
     gm = zero_model_wrapper(gm, zero_stage=3, gemini_config=gemini_config)
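To see how the reformatted config is consumed, here is a minimal sketch of the Gemini/ZeRO wrapping step, assuming a distributed environment already initialized via colossalai launch as in the test; the nn.Linear model is a stand-in for the traced GraphModule (gm) that the test actually wraps:

import torch.nn as nn

from colossalai.accelerator import get_accelerator
from colossalai.zero import zero_model_wrapper

gemini_config = dict(
    strict_ddp_mode=False,
    device=get_accelerator().get_current_device(),  # place parameters on the active backend
    placement_policy="cpu",  # offload to CPU memory
    pin_memory=True,
    search_range_m=128,
)

# Stand-in model; the test wraps a traced GraphModule (`gm`) instead.
model = zero_model_wrapper(nn.Linear(8, 8), zero_stage=3, gemini_config=gemini_config)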