mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-08-06 10:34:23 +00:00
* [npu] setup device utils (#5047) * [npu] add npu device support * [npu] support low level zero * [test] update npu zero plugin test * [hotfix] fix import * [test] recover tests * [npu] gemini support npu (#5052) * [npu] refactor device utils * [gemini] support npu * [example] llama2+gemini support npu * [kernel] add arm cpu adam kernel (#5065) * [kernel] add arm cpu adam * [optim] update adam optimizer * [kernel] arm cpu adam remove bf16 support
34 lines
769 B
Python
34 lines
769 B
Python
"""Public re-exports for this utils package.

Aggregates the commonly used helpers from the sibling submodules
(``common``, ``device``, ``multi_tensor_apply``, ``tensor_detector``,
``timer``) so callers can import them from the package root. The public
surface is declared explicitly in ``__all__`` below.

NOTE(review): ``_cast_float`` is underscore-prefixed yet listed in
``__all__`` — kept as-is because external callers may rely on it.
"""

# General-purpose helpers (context managers, seeding, DDP/storage utils).
from .common import (
    _cast_float,
    conditional_context,
    disposable,
    ensure_path_exists,
    free_storage,
    is_ddp_ignored,
    set_seed,
)

# Device abstraction layer (CUDA/NPU selection and synchronization).
from .device import IS_NPU_AVAILABLE, empty_cache, get_current_device, set_device, set_to_cuda, synchronize
from .multi_tensor_apply import multi_tensor_applier
from .tensor_detector import TensorDetector
from .timer import MultiTimer, Timer

__all__ = [
    "conditional_context",
    "get_current_device",
    "synchronize",
    "empty_cache",
    "set_to_cuda",
    "Timer",
    "MultiTimer",
    "multi_tensor_applier",
    "TensorDetector",
    "ensure_path_exists",
    "disposable",
    "_cast_float",
    "free_storage",
    "set_seed",
    "is_ddp_ignored",
    "set_device",
    "IS_NPU_AVAILABLE",
]