Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-05-31 11:25:27 +00:00)
fix missing pad token (#5690)
Co-authored-by: Edenzzzz <wtan45@wisc.edu>
parent d3f34ee8cc
commit c25f83c85f
@@ -243,7 +243,12 @@ def main():
     # ====================================
     # gpt2 pretrained model
 
-    cfg = AutoConfig.from_pretrained(model_name, num_labels=data_builder.num_labels)
+    cfg = AutoConfig.from_pretrained(
+        model_name,
+        num_labels=data_builder.num_labels,
+        pad_token=data_builder.tokenizer.pad_token,
+        pad_token_id=data_builder.tokenizer.pad_token_id,
+    )
 
     if model_name == "gpt2":
         model = GPT2ForSequenceClassification.from_pretrained(model_name, config=cfg).cuda()
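For context, a minimal standalone sketch (not part of this commit) of why the pad token has to be propagated: GPT-2 ships without a pad token, so the tokenizer is usually patched first and the resulting pad token id is then passed into the config so that GPT2ForSequenceClassification can tell padding apart from real tokens when pooling for classification. The data-builder and exact values here are placeholders; the diff above takes them from data_builder instead.

    from transformers import AutoConfig, AutoTokenizer, GPT2ForSequenceClassification

    model_name = "gpt2"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    if tokenizer.pad_token is None:
        # GPT-2 defines no pad token; reusing EOS as padding is the common
        # workaround (an assumption here, mirroring typical data pipelines).
        tokenizer.pad_token = tokenizer.eos_token

    # Propagate the pad token into the config, as the diff above does.
    cfg = AutoConfig.from_pretrained(
        model_name,
        num_labels=2,  # placeholder; the example reads this from data_builder.num_labels
        pad_token=tokenizer.pad_token,
        pad_token_id=tokenizer.pad_token_id,
    )
    model = GPT2ForSequenceClassification.from_pretrained(model_name, config=cfg)

    # With pad_token_id set, batched inputs can be padded and the model can
    # locate the last non-padding token that feeds the classification head.
    batch = tokenizer(
        ["a short sentence", "a noticeably longer second sentence"],
        padding=True,
        return_tensors="pt",
    )
    logits = model(**batch).logits  # shape: (batch_size, num_labels)

Without the pad_token_id in the config, padded batches either fail outright or the model cannot identify the last non-padding position, which is what this fix addresses.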