From 60155de2a69630e0f8536d32c6d5d7481005d00a Mon Sep 17 00:00:00 2001
From: Zach Nussbaum
Date: Wed, 12 Apr 2023 03:50:54 +0000
Subject: [PATCH] chore: peft

---
 configs/train/finetune_gptj.yaml      | 12 ++++++------
 configs/train/finetune_gptj_lora.yaml | 10 +++++-----
 configs/train/finetune_lora.yaml      |  2 +-
 peft                                  |  2 +-
 4 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/configs/train/finetune_gptj.yaml b/configs/train/finetune_gptj.yaml
index 1ff6a7bb..ef9802d6 100644
--- a/configs/train/finetune_gptj.yaml
+++ b/configs/train/finetune_gptj.yaml
@@ -2,12 +2,12 @@
 model_name: "EleutherAI/gpt-j-6B"
 tokenizer_name: "EleutherAI/gpt-j-6B"
 gradient_checkpointing: true
-save_name: "nomic-ai/gpt4all-warmup-lr"
+save_name: # CHANGE
 
 # dataset
 streaming: false
 num_proc: 64
-dataset_path: "nomic-ai/turbo-500k-multi"
+dataset_path: # CHANGE
 max_length: 1024
 batch_size: 32
 
@@ -18,8 +18,8 @@ weight_decay: 0.0
 eval_every: 500
 eval_steps: 105
 save_every: 500
-log_grads_every: 500
-output_dir: "ckpts/gpt4all-gptj-multinode"
+log_grads_every: 100
+output_dir: # CHANGE
 checkpoint: null
 lora: false
 warmup_steps: 500
@@ -27,7 +27,7 @@ num_epochs: 2
 
 # logging
 wandb: true
-wandb_entity: vicuna
-wandb_project_name: vicuna
+wandb_entity: # CHANGE
+wandb_project_name: # CHANGE
 
 seed: 42
diff --git a/configs/train/finetune_gptj_lora.yaml b/configs/train/finetune_gptj_lora.yaml
index 325c30f7..c2668ddd 100644
--- a/configs/train/finetune_gptj_lora.yaml
+++ b/configs/train/finetune_gptj_lora.yaml
@@ -2,12 +2,12 @@
 model_name: "EleutherAI/gpt-j-6b"
 tokenizer_name: "EleutherAI/gpt-j-6b"
 gradient_checkpointing: false
-save_name: "nomic-ai/gpt4all-mosaic"
+save_name: # CHANGE
 
 # dataset
 streaming: false
 num_proc: 64
-dataset_path: "nomic-ai/turbo-500k-multi"
+dataset_path: # CHANGE
 max_length: 1024
 batch_size: 1
 
@@ -19,7 +19,7 @@ eval_every: 500
 eval_steps: 105
 save_every: 500
 log_grads_every: 500
-output_dir: "ckpts/gpt4all-gptj-multinode"
+output_dir: # CHANGE
 checkpoint: null
 lora: true
 warmup_steps: 500
@@ -27,7 +27,7 @@ num_epochs: 2
 
 # logging
 wandb: true
-wandb_entity: zanussbaum
-wandb_project_name: mosaic
+wandb_entity: # CHANGE
+wandb_project_name: # CHANGE
 
 seed: 42
diff --git a/configs/train/finetune_lora.yaml b/configs/train/finetune_lora.yaml
index cf916d3b..7f270d85 100644
--- a/configs/train/finetune_lora.yaml
+++ b/configs/train/finetune_lora.yaml
@@ -2,7 +2,7 @@
 model_name: # update
 tokenizer_name: # update
 gradient_checkpointing: false
-save_name: "nomic-ai/gpt4all-lora-multi-turn"
+save_name: # CHANGE
 
 # dataset
 streaming: false
diff --git a/peft b/peft
index deff03f2..098962fa 160000
--- a/peft
+++ b/peft
@@ -1 +1 @@
-Subproject commit deff03f2c251534fffd2511fc2d440e84cc54b1b
+Subproject commit 098962fa6515f2e4fe83a757f5995d3ffbb1c373
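
Example (not part of the patch): a minimal sketch of how the # CHANGE placeholders in configs/train/finetune_gptj.yaml might be filled in before training. Every value below is hypothetical — the repo names, dataset path, checkpoint directory, and W&B entity/project are placeholders for illustration, not values from the source.

# Hypothetical fill-ins for the fields this patch blanks out;
# substitute your own model repo, dataset, and W&B settings.
save_name: "my-org/gptj-finetuned"        # name/repo for the saved model (hypothetical)
dataset_path: "my-org/my-instruct-data"   # Hugging Face dataset to fine-tune on (hypothetical)
output_dir: "ckpts/gptj-run1"             # local directory for checkpoints (hypothetical)
wandb_entity: "my-team"                   # your Weights & Biases user or team (hypothetical)
wandb_project_name: "gptj-finetune"       # W&B project that receives run logs (hypothetical)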