mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2026-01-16 07:28:45 +00:00
* update extension
* update cpu adam
* update is
* add doc for cpu adam
* update kernel
* update commit
* update flash
* update memory efficient
* update flash attn
* update flash attention loader
* update api
* fix
* update doc
* update example time limit
* reverse change
* fix doc
* remove useless kernel
* fix
* not use warning
* update
* update
15 lines
423 B
Python
from .cpu_adam_loader import CPUAdamLoader
from .cuda_native import FusedScaleMaskSoftmax, LayerNorm, MultiHeadAttention
from .extensions.flash_attention import AttnMaskType
from .flash_attention_loader import ColoAttention, FlashAttentionLoader

__all__ = [
    "LayerNorm",
    "FusedScaleMaskSoftmax",
    "MultiHeadAttention",
    "CPUAdamLoader",
    "FlashAttentionLoader",
    "ColoAttention",
    "AttnMaskType",
]
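For context, here is a minimal, hedged sketch of how downstream code might consume these re-exports. It assumes this file is the package's __init__.py at colossalai/kernel (inferred from the relative imports, not stated on this page) and that ColossalAI is installed with its native kernels built; it shows no method calls because this file defines none, only the public surface.

# Hedged sketch, not part of the file above.
# Assumption: the file shown is colossalai/kernel/__init__.py, so the names
# in __all__ are addressable as colossalai.kernel.<Name>.
from colossalai.kernel import (
    AttnMaskType,
    ColoAttention,
    CPUAdamLoader,
    FlashAttentionLoader,
)

import colossalai.kernel as kernel

# __all__ gates the wildcard-import surface: only these seven names are
# re-exported; helper submodules such as cpu_adam_loader are not.
assert set(kernel.__all__) == {
    "LayerNorm",
    "FusedScaleMaskSoftmax",
    "MultiHeadAttention",
    "CPUAdamLoader",
    "FlashAttentionLoader",
    "ColoAttention",
    "AttnMaskType",
}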