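"""Round-trip serialization tests for langchain_openai LLM and chat models."""
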
from langchain_core.load.dump import dumpd, dumps
from langchain_core.load.load import load, loads

from langchain_openai import ChatOpenAI, OpenAI


def test_loads_openai_llm() -> None:
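    """Round-trip an OpenAI LLM through dumps/loads with secrets_map."""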
    llm = OpenAI(model="davinci", temperature=0.5, openai_api_key="hello")
    llm_string = dumps(llm)
    llm2 = loads(llm_string, secrets_map={"OPENAI_API_KEY": "hello"})

    assert llm2 == llm
    llm_string_2 = dumps(llm2)
    assert llm_string_2 == llm_string
    assert isinstance(llm2, OpenAI)


def test_load_openai_llm() -> None:
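    """Round-trip an OpenAI LLM through dumpd/load with secrets_map."""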
    llm = OpenAI(model="davinci", temperature=0.5, openai_api_key="hello")
    llm_obj = dumpd(llm)
    llm2 = load(llm_obj, secrets_map={"OPENAI_API_KEY": "hello"})

    assert llm2 == llm
    assert dumpd(llm2) == llm_obj
    assert isinstance(llm2, OpenAI)


def test_loads_openai_chat() -> None:
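    """Round-trip a ChatOpenAI model through dumps/loads with secrets_map."""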
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.5, openai_api_key="hello")
    llm_string = dumps(llm)
    llm2 = loads(llm_string, secrets_map={"OPENAI_API_KEY": "hello"})

    assert llm2 == llm
    llm_string_2 = dumps(llm2)
    assert llm_string_2 == llm_string
    assert isinstance(llm2, ChatOpenAI)


def test_load_openai_chat() -> None:
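    """Round-trip a ChatOpenAI model through dumpd/load with secrets_map."""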
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.5, openai_api_key="hello")
    llm_obj = dumpd(llm)
    llm2 = load(llm_obj, secrets_map={"OPENAI_API_KEY": "hello"})

    assert llm2 == llm
    assert dumpd(llm2) == llm_obj
    assert isinstance(llm2, ChatOpenAI)