feat: lora gptj
This commit is contained in:
parent dcc5204352
commit fbb788e451
configs/train/finetune_gptj_lora.yaml  33  Normal file
@@ -0,0 +1,33 @@
# model/tokenizer
model_name: "EleutherAI/gpt-j-6B"
tokenizer_name: "EleutherAI/gpt-j-6B"
gradient_checkpointing: false
save_name: "nomic-ai/gpt4all-mosaic"

# dataset
streaming: false
num_proc: 64
dataset_path: "nomic-ai/turbo-500k-multi"
max_length: 1024
batch_size: 4

# train dynamics
lr: 2.0e-5
min_lr: 0
weight_decay: 0.0
eval_every: 500
eval_steps: 105
save_every: 500
log_grads_every: 500
output_dir: "ckpts/gpt4all-gptj-multinode"
checkpoint: null
lora: true
warmup_steps: 500
num_epochs: 2

# logging
wandb: true
wandb_entity: zanussbaum
wandb_project_name: mosaic
seed: 42
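For context, a minimal sketch of how a config like this could be consumed, assuming PyYAML plus Hugging Face transformers and peft. This is not the repo's actual trainer code, and the LoRA rank, alpha, dropout, and target modules below are illustrative assumptions only, since the YAML in this commit does not set them.

import yaml
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import LoraConfig, get_peft_model

# Load the training config added by this commit.
with open("configs/train/finetune_gptj_lora.yaml") as f:
    config = yaml.safe_load(f)

tokenizer = AutoTokenizer.from_pretrained(config["tokenizer_name"])
model = AutoModelForCausalLM.from_pretrained(config["model_name"])

if config["gradient_checkpointing"]:
    model.gradient_checkpointing_enable()

if config["lora"]:
    # Hypothetical LoRA hyperparameters: r, lora_alpha, lora_dropout, and
    # target_modules (GPT-J's attention projections) are not specified in
    # the YAML above; these are illustrative defaults.
    peft_config = LoraConfig(
        r=8,
        lora_alpha=32,
        lora_dropout=0.05,
        target_modules=["q_proj", "v_proj"],
        task_type="CAUSAL_LM",
    )
    model = get_peft_model(model, peft_config)
    model.print_trainable_parameters()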