ColossalAI/op_builder/__init__.py

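"""Registry of op builders for ColossalAI's compiled kernel extensions."""
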
from .arm_cpu_adam import ArmCPUAdamBuilder
from .cpu_adam import CPUAdamBuilder
from .fused_optim import FusedOptimBuilder
from .layernorm import LayerNormBuilder
from .moe import MOEBuilder
from .multi_head_attn import MultiHeadAttnBuilder
from .scaled_masked_softmax import ScaledMaskedSoftmaxBuilder
from .scaled_upper_triangle_masked_softmax import ScaledUpperTrainglemaskedSoftmaxBuilder
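
# Maps each op name to the builder class that compiles and loads its kernel extension.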
ALL_OPS = {
    "cpu_adam": CPUAdamBuilder,
    "fused_optim": FusedOptimBuilder,
    "moe": MOEBuilder,
    "multi_head_attn": MultiHeadAttnBuilder,
    "scaled_masked_softmax": ScaledMaskedSoftmaxBuilder,
    "scaled_upper_triangle_masked_softmax": ScaledUpperTrainglemaskedSoftmaxBuilder,
    "layernorm": LayerNormBuilder,
}
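
# Names exported via `from op_builder import *`.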
__all__ = [
    "ALL_OPS",
    "CPUAdamBuilder",
    "FusedOptimBuilder",
    "LayerNormBuilder",
    "MultiHeadAttnBuilder",
    "ScaledMaskedSoftmaxBuilder",
    "ScaledUpperTrainglemaskedSoftmaxBuilder",
    "MOEBuilder",
    "ArmCPUAdamBuilder",
]
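
For context, a minimal usage sketch follows. It assumes the package is importable as op_builder and that each builder exposes a load() method that JIT-compiles the C++/CUDA extension (or reuses a cached build) and returns the loaded module, as ColossalAI's Builder base class does; the load_op helper is illustrative only, not part of this module.

# Minimal sketch under the assumptions stated above: look up a builder in the
# registry and JIT-load its kernel extension.
from op_builder import ALL_OPS


def load_op(name: str):
    """Build (or fetch from cache) the kernel extension registered under `name`."""
    if name not in ALL_OPS:
        raise ValueError(f"Unknown op {name!r}; available ops: {sorted(ALL_OPS)}")
    builder_cls = ALL_OPS[name]
    # Assumption: every builder exposes load(), as in ColossalAI's Builder base class.
    return builder_cls().load()


cpu_adam_ext = load_op("cpu_adam")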