# model/tokenizer
model_name: # update
tokenizer_name: # update
gradient_checkpointing: false
save_name: # CHANGE

# dataset
streaming: false
num_proc: 64
dataset_path: "nomic-ai/turbo-500k-multi"
max_length: 1024
batch_size: 4

# train dynamics
lr: 5.0e-5
min_lr: 0
weight_decay: 0.0
eval_every: 2000
eval_steps: 100
save_every: 2000
output_dir: "ckpts/gpt4all-lora-multi"
checkpoint: null
lora: true
warmup_steps: 100
num_epochs: 2

# logging
wandb: true
wandb_entity: # update
wandb_project_name: # update
seed: 42
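
# --- usage sketch (assumption, not part of this repo's tooling) ---------
# A training script would typically consume a file like this via PyYAML;
# the loader below is an illustrative sketch, and the filename passed to
# open() is hypothetical. Note that fields left blank above ("# update",
# "# CHANGE") parse as None until filled in, so a script should check for
# them before training.
#
#   import yaml
#
#   with open("finetune_lora_multi.yaml") as f:  # path is illustrative
#       config = yaml.safe_load(f)
#
#   # Placeholder fields parse as None; fail fast if they are unset.
#   for key in ("model_name", "tokenizer_name", "save_name"):
#       if config.get(key) is None:
#           raise ValueError(f"config field {key!r} still needs a value")
#
#   print(f"LoRA fine-tune: {config['num_epochs']} epochs at lr={config['lr']}")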