From 07798cfbd2d6184c774697ab1af2b6c0dad0a5d3 Mon Sep 17 00:00:00 2001
From: Zach Nussbaum
Date: Tue, 28 Mar 2023 20:58:33 -0700
Subject: [PATCH] Update finetune_lora.yaml

---
 configs/train/finetune_lora.yaml | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/configs/train/finetune_lora.yaml b/configs/train/finetune_lora.yaml
index 47b1901e..acdc0e95 100644
--- a/configs/train/finetune_lora.yaml
+++ b/configs/train/finetune_lora.yaml
@@ -1,8 +1,8 @@
 # model/tokenizer
-model_name: "zpn/llama-7b"
-tokenizer_name: "zpn/llama-7b"
+model_name: # update
+tokenizer_name: # update
 gradient_checkpointing: false
-save_name: "nomic-ai/vicuna-lora-multi-turn"
+save_name: "nomic-ai/gpt4all-lora-multi-turn"
 
 # dataset
 streaming: false
@@ -16,7 +16,7 @@ lr: 5.0e-5
 eval_every: 2000
 eval_steps: 100
 save_every: 2000
-output_dir: "ckpts/llama-7b-lora-multi"
+output_dir: "ckpts/gpt4all-lora-multi"
 checkpoint: null
 lora: true
 warmup_steps: 100
@@ -24,6 +24,6 @@ num_epochs: 2
 
 # logging
 wandb: true
-wandb_entity: vicuna
-wandb_project_name: vicuna
-seed: 42
\ No newline at end of file
+wandb_entity: # update
+wandb_project_name: # update
+seed: 42
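
Note: after applying this patch, the "# update" placeholders must be filled in before the config is usable. Below is a minimal sketch of the affected sections with hypothetical values: the model/tokenizer names reuse the pre-patch "zpn/llama-7b" defaults purely as an illustration, and the wandb entity/project names are made-up placeholders, not real accounts.

    # model/tokenizer
    model_name: "zpn/llama-7b"       # hypothetical: any HF causal LM checkpoint
    tokenizer_name: "zpn/llama-7b"   # hypothetical: typically matches model_name

    # logging
    wandb: true
    wandb_entity: "my-wandb-team"         # hypothetical placeholder
    wandb_project_name: "gpt4all-lora"    # hypothetical placeholder
    seed: 42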