Mirror of https://github.com/nomic-ai/gpt4all — commit 6d98aef302 (parent 4e8e7e7300): "added eval code".
# Eval config: base LLaMA 7B with the tloen Alpaca LoRA adapter applied.

# model/tokenizer
model_name: "zpn/llama-7b"
tokenizer_name: "zpn/llama-7b"
lora: true
lora_path: "tloen/alpaca-lora-7b"

# generation settings
max_new_tokens: 512
temperature: 0.001   # near-greedy decoding for reproducible eval output
prompt: |
    #this code prints a string reversed
    my_string = "hello how are you"
    print(len(my_string))

    My code above does not work. Can you help me?
# Eval config: fully fine-tuned vicuna multi-turn checkpoint (no LoRA adapter).

# model/tokenizer
model_name: "nomic-ai/vicuna-full-multi-turn_epoch_0"
tokenizer_name: "zpn/llama-7b"
lora_path: "no-lora"   # sentinel value: no adapter is loaded

# generation settings
max_new_tokens: 512
temperature: 0.001   # near-greedy decoding for reproducible eval output
prompt: |
    #this code prints a string reversed
    my_string = "hello how are you"
    print(len(my_string))

    My code above does not work. Can you help me?
# Eval config: base LLaMA 7B with the vicuna multi-turn LoRA adapter (epoch 2).

# model/tokenizer
model_name: "zpn/llama-7b"
tokenizer_name: "zpn/llama-7b"
lora: true
lora_path: "nomic-ai/vicuna-lora-multi-turn_epoch_2"

# generation settings
max_new_tokens: 512
temperature: 0.001   # near-greedy decoding for reproducible eval output
prompt: |
    #this code prints a string reversed
    my_string = "hello how are you"
    print(len(my_string))

    My code above does not work. Can you help me?
# Eval config: base LLaMA 7B with the vicuna multi-turn LoRA adapter (epoch 3).

# model/tokenizer
model_name: "zpn/llama-7b"
tokenizer_name: "zpn/llama-7b"
lora: true
lora_path: "nomic-ai/vicuna-lora-multi-turn_epoch_3"

# generation settings
max_new_tokens: 512
temperature: 0.001   # near-greedy decoding for reproducible eval output
prompt: |
    #this code prints a string reversed
    my_string = "hello how are you"
    print(len(my_string))

    My code above does not work. Can you help me?
"""Plot overlaid perplexity histograms for every multi-turn eval result.

Reads each ``./eval_data/*multi*.pkl`` pickle produced by the eval run,
extracts its ``'perplexities'`` array, and overlays one translucent
histogram per model/LoRA pair into ``figs/perplexity_hist.png``.
"""
import glob
import pickle

import numpy as np
from matplotlib import pyplot as plt

plt.figure()
for fpath in glob.glob('./eval_data/*multi*.pkl'):
    # Filenames are '<prefix>__model-<name>__lora-<name>.pkl'; strip the
    # field prefixes and the trailing extension to get display labels.
    parts = fpath.split('__')
    model_name = parts[1].replace('model-', '').replace('.pkl', '')
    lora_name = parts[2].replace('lora-', '').replace('.pkl', '')

    # NOTE: pickle.load on files you did not produce yourself is unsafe;
    # acceptable here only because eval_data is generated locally.
    with open(fpath, 'rb') as f:
        data = pickle.load(f)

    perplexities = data['perplexities']
    # BUGFIX: the second positional argument of np.nan_to_num is `copy`,
    # not `nan` — the original `np.nan_to_num(perplexities, 100)` replaced
    # NaNs with 0.0. Use the keyword so NaNs land at the 100 cap instead.
    perplexities = np.nan_to_num(perplexities, nan=100)
    # Clamp outliers so all histograms share a comparable x-range.
    perplexities = np.clip(perplexities, 0, 100)
    plt.hist(perplexities, label='{}-{}'.format(model_name, lora_name), alpha=.5)

plt.xlabel('Perplexity')
plt.ylabel('Frequency')
plt.legend()
plt.savefig('figs/perplexity_hist.png')
Loading…
Reference in New Issue