Mirror of https://github.com/hpcaitech/ColossalAI.git
Latest commit: [feature] refactor colo attention (#5462)

* [extension] update api
* [feature] add colo attention
* [feature] update sdpa
* [feature] update npu attention
* [feature] update flash-attn
* [test] add flash attn test
* [test] update flash attn test
* [shardformer] update modeling to fit colo attention (#5465)
* [misc] refactor folder structure
* [shardformer] update llama flash-attn
* [shardformer] fix llama policy
* [devops] update tensornvme install
* [test] update llama test
* [shardformer] update colo attn kernel dispatch
* [shardformer] update blip2
* [shardformer] update chatglm
* [shardformer] update gpt2
* [shardformer] update gptj
* [shardformer] update opt
* [shardformer] update vit
* [shardformer] update colo attention mask prep
* [shardformer] update whisper
* [test] fix shardformer tests (#5514)
* [test] fix shardformer tests
from .cpu_adam import CpuAdamArmExtension, CpuAdamX86Extension
from .flash_attention import FlashAttentionDaoCudaExtension, FlashAttentionNpuExtension, FlashAttentionSdpaCudaExtension
from .layernorm import LayerNormCudaExtension
from .moe import MoeCudaExtension
from .optimizer import FusedOptimizerCudaExtension
from .softmax import ScaledMaskedSoftmaxCudaExtension, ScaledUpperTriangleMaskedSoftmaxCudaExtension

ALL_EXTENSIONS = [
    CpuAdamArmExtension,
    CpuAdamX86Extension,
    LayerNormCudaExtension,
    MoeCudaExtension,
    FusedOptimizerCudaExtension,
    ScaledMaskedSoftmaxCudaExtension,
    ScaledUpperTriangleMaskedSoftmaxCudaExtension,
    FlashAttentionDaoCudaExtension,
    FlashAttentionSdpaCudaExtension,
    FlashAttentionNpuExtension,
]

__all__ = [
    "CpuAdamArmExtension",
    "CpuAdamX86Extension",
    "LayerNormCudaExtension",
    "MoeCudaExtension",
    "FusedOptimizerCudaExtension",
    "ScaledMaskedSoftmaxCudaExtension",
    "ScaledUpperTriangleMaskedSoftmaxCudaExtension",
    "FlashAttentionDaoCudaExtension",
    "FlashAttentionSdpaCudaExtension",
    "FlashAttentionNpuExtension",
]
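The module above is a backend registry: each entry wraps one fused kernel (CPU Adam for x86/ARM, CUDA layernorm/MoE/softmax, and three flash-attention backends), and the commit message notes that shardformer now dispatches among the attention kernels. Below is a minimal usage sketch of how a caller might pick the first usable flash-attention backend from this registry. It assumes each extension class exposes is_available() and load() methods and that the package is importable as extensions; those names are assumptions for illustration, not confirmed ColossalAI API.

# Usage sketch (not part of the module): select a flash-attention backend.
# Assumes each extension exposes is_available()/load(); the real ColossalAI
# extension interface may differ.
from extensions import (
    FlashAttentionDaoCudaExtension,
    FlashAttentionNpuExtension,
    FlashAttentionSdpaCudaExtension,
)

def load_flash_attention_kernel():
    # Preference order: Dao's CUDA flash-attn, then PyTorch SDPA, then NPU.
    for ext_cls in (
        FlashAttentionDaoCudaExtension,
        FlashAttentionSdpaCudaExtension,
        FlashAttentionNpuExtension,
    ):
        ext = ext_cls()
        if ext.is_available():  # assumed availability probe
            return ext.load()   # assumed kernel loader (JIT/AOT build)
    raise RuntimeError("No flash-attention backend is available on this machine.")

Keeping the classes in a flat ALL_EXTENSIONS list, rather than hard-coding one backend, is what lets a dispatcher like this probe each platform (x86/ARM CPU, CUDA GPU, NPU) at runtime and fall back gracefully.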