mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-01 09:07:51 +00:00
[hotfix] Fix examples no pad token & auto parallel codegen bug; (#5606)
* fix no pad token bug

* fixed some auto parallel codegen bug, but might not run on torch 2.1

---------

Co-authored-by: Edenzzzz <wtan45@wisc.edu>
This commit is contained in:
@@ -62,6 +62,8 @@ class GLUEDataBuilder:
|
||||
self.text_fields = self.task_text_field_map[task_name]
|
||||
self.num_labels = self.glue_task_num_labels[task_name]
|
||||
self.tokenizer: PreTrainedTokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path, use_fast=True)
|
||||
if not getattr(self.tokenizer, "pad_token", None):
|
||||
self.tokenizer.pad_token = self.tokenizer._eos_token
|
||||
self.setup()
|
||||
|
||||
def setup(self):
|
||||
|
Reference in New Issue
Block a user