fix: gptj multinode

mosaic
Zach Nussbaum 1 year ago
parent fde7d9506f
commit 0a3834d086

@@ -1,6 +1,6 @@
 {
     "train_batch_size": "auto",
-    "gradient_accumulation_steps": 4,
+    "gradient_accumulation_steps": "auto",
     "train_micro_batch_size_per_gpu": "auto",
     "fp16": {
         "enabled": "auto",
@@ -19,7 +19,7 @@
         "device": "none"
     },
     "offload_optimizer": {
-        "device": "none"
+        "device": "cpu"
     },
     "allgather_partitions": true,
     "allgather_bucket_size": 5e8,

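The switch to "gradient_accumulation_steps": "auto" defers that value to the launcher, and "offload_optimizer": {"device": "cpu"} moves optimizer state (e.g. Adam moments and variances) into host RAM, which helps fit a 6B-parameter model per GPU under ZeRO. As a hedged sketch of how such "auto" placeholders get resolved, here is the Hugging Face Trainer integration; this repo's training script may wire DeepSpeed up differently (e.g. via Accelerate), and the path and values below are placeholders, not taken from this commit:

```python
from transformers import TrainingArguments

# Hypothetical values; the HF DeepSpeed integration fills every "auto"
# entry in the JSON ("train_batch_size", "gradient_accumulation_steps",
# "train_micro_batch_size_per_gpu", fp16) from these arguments at launch.
args = TrainingArguments(
    output_dir="ckpts/example",                    # placeholder
    per_device_train_batch_size=32,
    gradient_accumulation_steps=1,
    fp16=True,
    deepspeed="configs/deepspeed/ds_config.json",  # hypothetical path
)
```

Keeping the batch-size fields on "auto" means the JSON no longer has to be edited in lockstep with the training config, which matters when the same file is reused across single-node and multinode launches.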
@@ -2,24 +2,24 @@
 model_name: "EleutherAI/gpt-j-6B"
 tokenizer_name: "EleutherAI/gpt-j-6B"
 gradient_checkpointing: true
-save_name: "nomic-ai/gpt4all-gptj-multiturn-lr-aggressive"
+save_name: "nomic-ai/gpt4all-gptj-multinode"
 # dataset
 streaming: false
 num_proc: 64
 dataset_path: "data_multiplus"
 max_length: 1024
-batch_size: 8
+batch_size: 32
 # train dynamics
-lr: 2.0e-5
+lr: 4.0e-5
 min_lr: 0
 weight_decay: 0.0
-eval_every: 200
+eval_every: 100
 eval_steps: 105
-save_every: 400
-log_grads_every: 200
-output_dir: "ckpts/gpt4all-gptj-full-multiturn-lr-aggreive"
+save_every: 100
+log_grads_every: 100
+output_dir: "ckpts/gpt4all-gptj-multinode"
 checkpoint: null
 lora: false
 warmup_steps: 500
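A minimal sketch of consuming this YAML with PyYAML, assuming a file path of my own choosing since the commit view doesn't show the file name:

```python
import yaml

# Hypothetical path; the actual config file name isn't shown above.
with open("configs/train/finetune_gptj.yaml") as f:
    config = yaml.safe_load(f)

# Values after this commit: a doubled learning rate, checkpoints 4x as
# often (400 -> 100), and eval 2x as often (200 -> 100).
assert config["lr"] == 4.0e-5
assert config["save_every"] == 100
print(config["save_name"])  # nomic-ai/gpt4all-gptj-multinode
```

Raising batch_size from 8 to 32 alongside lr from 2.0e-5 to 4.0e-5 follows the common heuristic of scaling the learning rate up with the larger effective multinode batch.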
