You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
gpt4all/gpt4all-bindings/python/tests/test_pyllmodel.py

62 lines
1.5 KiB
Python

from io import StringIO
import sys
from gpt4all import pyllmodel
# TODO: Integration test for loadmodel and prompt.
# Right now, too slow b/c it requires file download.
def test_create_gptj():
    """A freshly constructed GPTJModel reports the 'gptj' model type."""
    model = pyllmodel.GPTJModel()
    assert model.model_type == "gptj"
def test_create_llama():
    """A freshly constructed LlamaModel reports the 'llama' model type."""
    model = pyllmodel.LlamaModel()
    assert model.model_type == "llama"
def test_create_mpt():
    """A freshly constructed MPTModel reports the 'mpt' model type."""
    model = pyllmodel.MPTModel()
    assert model.model_type == "mpt"
def prompt_unloaded_mpt():
    """Prompting an MPTModel with no weights loaded prints an error to stdout.

    Captures stdout around the prompt() call and asserts the exact error line.

    NOTE(review): this function lacks the ``test_`` prefix, so pytest does not
    collect it — possibly deliberate given the TODO above; confirm.
    """
    mpt = pyllmodel.MPTModel()
    old_stdout = sys.stdout
    sys.stdout = collected = StringIO()
    try:
        mpt.prompt("hello there")
    finally:
        # Restore stdout even if prompt() raises, so a failure here cannot
        # leave the redirect in place and silence every subsequent test.
        sys.stdout = old_stdout
    response = collected.getvalue().strip()
    assert response == "MPT ERROR: prompt won't work with an unloaded model!"
def prompt_unloaded_gptj():
    """Prompting a GPTJModel with no weights loaded prints an error to stdout.

    Captures stdout around the prompt() call and asserts the exact error line.

    NOTE(review): this function lacks the ``test_`` prefix, so pytest does not
    collect it — possibly deliberate given the TODO above; confirm.
    """
    gptj = pyllmodel.GPTJModel()
    old_stdout = sys.stdout
    sys.stdout = collected = StringIO()
    try:
        gptj.prompt("hello there")
    finally:
        # Restore stdout even if prompt() raises, so a failure here cannot
        # leave the redirect in place and silence every subsequent test.
        sys.stdout = old_stdout
    response = collected.getvalue().strip()
    assert response == "GPT-J ERROR: prompt won't work with an unloaded model!"
def prompt_unloaded_llama():
    """Prompting a LlamaModel with no weights loaded prints an error to stdout.

    Captures stdout around the prompt() call and asserts the exact error line.

    NOTE(review): this function lacks the ``test_`` prefix, so pytest does not
    collect it — possibly deliberate given the TODO above; confirm.
    """
    llama = pyllmodel.LlamaModel()
    old_stdout = sys.stdout
    sys.stdout = collected = StringIO()
    try:
        llama.prompt("hello there")
    finally:
        # Restore stdout even if prompt() raises, so a failure here cannot
        # leave the redirect in place and silence every subsequent test.
        sys.stdout = old_stdout
    response = collected.getvalue().strip()
    assert response == "LLAMA ERROR: prompt won't work with an unloaded model!"