add colossalai kernel module (#55)

This commit is contained in:
shenggan
2021-12-21 12:19:52 +08:00
committed by GitHub
parent 648f806315
commit 5c3843dc98
43 changed files with 8329 additions and 0 deletions

View File

@@ -0,0 +1,8 @@
"""Public API of the kernel package.

Re-exports JIT-fused elementwise ops (bias+dropout+add, bias+GELU) and the
CUDA-native layers so callers can import them directly from this package.
"""
# JIT-compiled fused kernels (train/inference variants share the fused bias+dropout+add path).
from .jit.bias_dropout_add import bias_dropout_add_fused_train, bias_dropout_add_fused_inference
from .jit.bias_gelu import bias_gelu_impl
# Custom CUDA extensions; presumably require a CUDA build of the package — TODO confirm.
from .cuda_native import LayerNorm, FusedScaleMaskSoftmax, MultiHeadAttention
# Explicit public surface: controls `from <package> import *` and documents the API.
__all__ = [
    "bias_dropout_add_fused_train", "bias_dropout_add_fused_inference", "bias_gelu_impl",
    "LayerNorm", "FusedScaleMaskSoftmax", "MultiHeadAttention"
]