# model/tokenizer
model_name: "EleutherAI/gpt-j-6B"
tokenizer_name: "EleutherAI/gpt-j-6B"
gradient_checkpointing: true
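# gradient checkpointing recomputes activations during the backward pass,
# trading extra compute for a large reduction in activation memory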
save_name: "nomic-ai/gpt4all-mosaic"
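# NOTE: save_name follows the Hugging Face "org/repo" convention; presumably
# the identifier the fine-tuned weights are saved or pushed under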

# dataset
streaming: false
num_proc: 64
dataset_path: "nomic-ai/turbo-500k-multi"
max_length: 1024
batch_size: 8
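# assumed Hugging Face `datasets` semantics: streaming=false loads the dataset
# up front, num_proc parallelizes preprocessing across 64 workers, max_length
# is the sequence length in tokens, and batch_size is per device (the
# "multinode" output_dir below suggests the effective global batch is larger)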

# train dynamics
lr: 2.0e-5
min_lr: 0
weight_decay: 0.0
eval_every: 500
eval_steps: 105
save_every: 500
log_grads_every: 500
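# the *_every values above are presumably counted in optimizer steps;
# eval_steps likely caps how many batches each evaluation pass runs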
output_dir: "ckpts/gpt4all-gptj-multinode"
checkpoint: null
lora: false
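# lora: false means a full-parameter fine-tune; true would presumably switch
# to parameter-efficient LoRA adapters instead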
warmup_steps: 500
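# schedule sketch (assumed): lr warms up over warmup_steps to the 2.0e-5 peak,
# then decays toward min_lr; the exact decay shape is defined by the training script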
num_epochs: 2

# logging
wandb: true
wandb_entity: vicuna
wandb_project_name: vicuna
seed: 42