Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-06-22 05:29:36 +00:00
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
parent 07fa048895
commit efb2d98da0
@@ -131,7 +131,7 @@ class T5PipelineForwards:
         # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
         # ourselves in which case we just need to make it broadcastable to all heads.
-        extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape)
+        extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape)

         # If a 2D or 3D attention mask is provided for the cross-attention
         # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
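For context: get_extended_attention_mask here is inherited from Hugging Face's ModuleUtilsMixin. Roughly, it reshapes a 2D padding mask so it broadcasts over every head and query position and converts it into an additive bias. The sketch below is a simplified illustration of that behaviour, not the library's exact code; extend_attention_mask is a hypothetical helper name.

import torch

def extend_attention_mask(attention_mask: torch.Tensor, dtype=torch.float32) -> torch.Tensor:
    # [batch_size, seq_length] -> [batch_size, 1, 1, seq_length]: broadcastable
    # across all attention heads and all query positions.
    extended = attention_mask[:, None, None, :].to(dtype)
    # Turn 1/0 keep/drop flags into an additive bias: 0.0 where attended,
    # a very large negative value where masked, so softmax drives those to ~0.
    return (1.0 - extended) * torch.finfo(dtype).min

mask = torch.tensor([[1, 1, 1, 0]])       # last position is padding
print(extend_attention_mask(mask).shape)  # torch.Size([1, 1, 1, 4])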
@@ -159,7 +159,6 @@ class T5PipelineForwards:
         else:
             causal_mask = None

-
         # Prepare head mask if needed
         head_mask = self.get_head_mask(head_mask, self.config.num_layers)
         cross_attn_head_mask = self.get_head_mask(cross_attn_head_mask, self.config.num_layers)
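Similarly, get_head_mask (also inherited from Transformers) normalises an optional head-pruning mask into one broadcastable entry per layer. A rough sketch of the idea, under the assumption of the usual [num_heads] or [num_layers, num_heads] input shapes; prepare_head_mask is a made-up name, not the library API.

import torch

def prepare_head_mask(head_mask, num_layers: int):
    if head_mask is None:
        # Nothing to mask: one no-op entry per layer.
        return [None] * num_layers
    if head_mask.dim() == 1:    # [num_heads], shared by every layer
        head_mask = head_mask[None, None, :, None, None].expand(num_layers, -1, -1, -1, -1)
    elif head_mask.dim() == 2:  # [num_layers, num_heads]
        head_mask = head_mask[:, None, :, None, None]
    # Result: [num_layers, 1, num_heads, 1, 1], broadcastable against per-layer
    # attention probabilities of shape [batch, heads, q_len, k_len].
    return head_mask

print(len(prepare_head_mask(None, 6)))            # 6
print(prepare_head_mask(torch.ones(8), 6).shape)  # torch.Size([6, 1, 8, 1, 1])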