gpt4all/gpt4all-training/configs/inference/gptj.yaml

# model/tokenizer
model_name: "nomic-ai/gpt4all-warmup-lr-epoch_1"
tokenizer_name: "EleutherAI/gpt-j-6B"

# dataset
streaming: false
num_proc: 64
dataset_path: "nomic-ai/turbo-500k-multi"
max_length: 1024
batch_size: 32

# logging
seed: 42
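
For reference, below is a minimal Python sketch of how a config like this might be consumed at inference time. It assumes the PyYAML, transformers, and datasets libraries and a hypothetical relative path to this file; it is an illustrative sketch, not the gpt4all-training repo's own loading code.

# Illustrative sketch only: read the YAML config and build the pieces it names.
# Assumes PyYAML, torch, transformers, and datasets are installed; the path and
# the "train" split are assumptions, not taken from the config itself.
import yaml
import torch
from datasets import load_dataset
from transformers import AutoModelForCausalLM, AutoTokenizer

with open("configs/inference/gptj.yaml") as f:
    config = yaml.safe_load(f)

torch.manual_seed(config["seed"])  # seed: 42

# model/tokenizer section
tokenizer = AutoTokenizer.from_pretrained(config["tokenizer_name"])
tokenizer.model_max_length = config["max_length"]  # max_length: 1024
model = AutoModelForCausalLM.from_pretrained(config["model_name"])
model.eval()

# dataset section: streaming is false here, so the split is fully downloaded;
# num_proc only applies to non-streaming preparation.
dataset = load_dataset(
    config["dataset_path"],
    split="train",  # assumed split name
    streaming=config["streaming"],
    num_proc=None if config["streaming"] else config["num_proc"],
)

# batch_size: 32 would typically size the evaluation DataLoader, omitted here.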