[feat] refactored extension module (#5298)
* [feat] refactored extension module
* polish
* polish
* polish
* polish
* polish
* polish
* polish
* polish
* polish
* polish
@@ -530,7 +530,7 @@ class GPTJPipelineForwards:
 def get_gptj_flash_attention_forward():
     from transformers.models.gptj.modeling_gptj import GPTJAttention

-    from colossalai.kernel.cuda_native import AttnMaskType, ColoAttention
+    from colossalai.nn.layer.colo_attention import AttnMaskType, ColoAttention

     def split_heads(tensor, num_attention_heads, attn_head_size, rotary):
         """
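The only functional change in this hunk is the import path: AttnMaskType and ColoAttention move from colossalai.kernel.cuda_native to the new colossalai.nn.layer.colo_attention module. A minimal compatibility sketch for downstream code that must run against both pre- and post-refactor releases follows; the try/except fallback is an illustration of a common migration idiom, not part of this commit:

try:
    # New location after the extension-module refactor (#5298)
    from colossalai.nn.layer.colo_attention import AttnMaskType, ColoAttention
except ImportError:
    # Older ColossalAI releases still expose the pre-refactor path
    from colossalai.kernel.cuda_native import AttnMaskType, ColoAttention

Only the module path changes here; per this hunk, the class names and their call sites inside get_gptj_flash_attention_forward are untouched.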