mirror of https://github.com/hpcaitech/ColossalAI.git
add explanation
@@ -1037,6 +1037,9 @@ def get_jit_fused_bert_output_forward():
     return forward


+# Fix the tgt_len size in sequence parallel attention:
+# same as the one in BertSdpaSelfAttention forward in v4.51.3 transformers except the
+# _, _, tgt_len, _ = query_layer.shape
 def get_bert_sequence_parallel_attention_forward(shard_config: ShardConfig):
     from transformers.models.bert.modeling_bert import BertSdpaSelfAttention
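To make the added comment concrete, here is a minimal sketch (illustration only, not the ColossalAI code path) of the shape mismatch it describes: under sequence parallelism, hidden_states holds only this rank's shard of the sequence, so the upstream v4.51.3 pattern bsz, tgt_len, _ = hidden_states.size() yields the shard length, while the gathered query_layer carries the full sequence length. The sizes below and the simulated all-gather are assumptions for illustration.

import torch

bsz, full_len, num_heads, head_dim = 2, 8, 4, 16
world_size = 2                      # hypothetical sequence-parallel degree
local_len = full_len // world_size  # tokens visible to this rank

# This rank's sequence shard of the layer input.
hidden_states = torch.randn(bsz, local_len, num_heads * head_dim)

# Upstream BertSdpaSelfAttention would do: bsz, tgt_len, _ = hidden_states.size()
_, shard_tgt_len, _ = hidden_states.size()   # -> 4, only the shard length

# The query projection is gathered back to the full sequence length before
# attention; the all-gather is simulated here with a full-length tensor.
query = torch.randn(bsz, full_len, num_heads * head_dim)
query_layer = query.view(bsz, -1, num_heads, head_dim).transpose(1, 2)

# The fix the comment documents: read tgt_len from query_layer itself.
_, _, tgt_len, _ = query_layer.shape         # -> 8, the full sequence length
print(shard_tgt_len, tgt_len)                # 4 8

Reading the length from query_layer keeps the attention mask and output reshapes sized to the full (gathered) sequence rather than the local shard.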