From 5de765410a7faf13d9ffc194ab9d6e1d3256161a Mon Sep 17 00:00:00 2001
From: Zach Nussbaum
Date: Sat, 8 Apr 2023 20:37:51 +0000
Subject: [PATCH] fix: add wd + min lr to config

---
 configs/train/finetune_lora.yaml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/configs/train/finetune_lora.yaml b/configs/train/finetune_lora.yaml
index acdc0e95..cf916d3b 100644
--- a/configs/train/finetune_lora.yaml
+++ b/configs/train/finetune_lora.yaml
@@ -7,12 +7,14 @@ save_name: "nomic-ai/gpt4all-lora-multi-turn"
 # dataset
 streaming: false
 num_proc: 64
-dataset_path: "data_multiturn"
+dataset_path: "nomic-ai/turbo-500k-multi"
 max_length: 1024
 batch_size: 4
 
 # train dynamics
 lr: 5.0e-5
+min_lr: 0
+weight_decay: 0.0
 eval_every: 2000
 eval_steps: 100
 save_every: 2000
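
For context, a minimal sketch of how the two new keys could be consumed by a PyTorch training script: `weight_decay` feeds straight into the AdamW optimizer, and `min_lr` sets the floor that a cosine learning-rate schedule decays toward instead of decaying to zero. The stand-in model, the step count, and the schedule implementation are assumptions for illustration, not code from this repository.

```python
import math
import torch

# Values taken from the patched finetune_lora.yaml.
config = {"lr": 5.0e-5, "min_lr": 0.0, "weight_decay": 0.0}

model = torch.nn.Linear(10, 10)  # stand-in for the LoRA-wrapped model

# weight_decay is passed directly to the optimizer.
optimizer = torch.optim.AdamW(
    model.parameters(),
    lr=config["lr"],
    weight_decay=config["weight_decay"],
)

total_steps = 10_000  # assumed; would normally come from the dataloader length

def cosine_with_floor(step: int) -> float:
    """Multiplier that decays the LR from `lr` to `min_lr` on a cosine curve."""
    progress = min(step / total_steps, 1.0)
    floor = config["min_lr"] / config["lr"]  # 0 here, so a plain cosine decay
    return floor + (1 - floor) * 0.5 * (1 + math.cos(math.pi * progress))

scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, cosine_with_floor)
```

With `min_lr: 0` the schedule is an ordinary cosine decay; raising it keeps a small residual learning rate at the end of training, which is the usual reason to expose it as a config knob.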