Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-07 20:10:17 +00:00
fix beam_width

commit 3ad1f3b78b (parent b2eb9cd186), committed by FrankLeeeee
@@ -176,8 +176,12 @@ def llama_attn_forward(
def generate_padding_position_id(input_ids: torch.Tensor) -> torch.Tensor:
    # TODO: replace this with a more flexible way to obtain the padding_id
    # instead of hard-coding it here.
    padding_id = 2
    attention_mask = input_ids.ne(padding_id).long()
    position_ids = attention_mask.long().cumsum(-1) - 1
    position_ids.masked_fill_(attention_mask == 0, 1)
    return position_ids


# def unpad_inputs(input_ids: torch.Tensor):
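For context, a minimal usage sketch of how this helper behaves on a left-padded batch (not part of the commit; the tensor values are illustrative, and the padding_id of 2 simply matches the hard-coded value above):

import torch

# Hypothetical example: a batch of two sequences where token id 2 marks padding.
input_ids = torch.tensor([
    [2, 2, 5, 6, 7],   # two leading pad tokens
    [3, 4, 5, 6, 7],   # no padding
])

position_ids = generate_padding_position_id(input_ids)
# Non-pad tokens get positions counted from the first real token;
# pad positions are filled with 1:
# tensor([[1, 1, 0, 1, 2],
#         [0, 1, 2, 3, 4]])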