mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-08-23 10:11:37 +00:00
* cherry-pick flash attention 2
* [shardformer] update shardformer to use flash attention 2, fix
10 lines
389 B
Python
from .layer_norm import MixedFusedLayerNorm as LayerNorm
from .mha.mha import ColoAttention
from .multihead_attention import MultiHeadAttention
from .scaled_softmax import AttnMaskType, FusedScaleMaskSoftmax, ScaledUpperTriangMaskedSoftmax

# Public API of this subpackage; keep this list in sync with the imports above
# so `from <package> import *` exposes exactly these names.
__all__ = [
    'LayerNorm', 'MultiHeadAttention', 'FusedScaleMaskSoftmax', 'ScaledUpperTriangMaskedSoftmax', 'ColoAttention',
    'AttnMaskType'
]