|
|
@ -7,7 +7,6 @@ from langchain_openai import OpenAI
|
|
|
|
# Provide a fake API key so the OpenAI client can be constructed in unit
# tests; no network calls are made by these tests.
os.environ["OPENAI_API_KEY"] = "foo"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.requires("openai")
def test_openai_model_param() -> None:
    """The ``model`` constructor kwarg is exposed as ``model_name``."""
    llm = OpenAI(model="foo")
    assert llm.model_name == "foo"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.requires("openai")
def test_openai_model_kwargs() -> None:
    """Extra params passed via ``model_kwargs`` are stored verbatim."""
    llm = OpenAI(model_kwargs={"foo": "bar"})
    assert llm.model_kwargs == {"foo": "bar"}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.requires("openai")
def test_openai_invalid_model_kwargs() -> None:
    """Routing a declared field (``model_name``) through ``model_kwargs``
    must raise ``ValueError`` rather than silently shadow the field."""
    with pytest.raises(ValueError):
        OpenAI(model_kwargs={"model_name": "foo"})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.requires("openai")
def test_openai_incorrect_field() -> None:
    """An unknown constructor kwarg triggers a 'not default parameter'
    warning instead of a hard error."""
    with pytest.warns(match="not default parameter"):
        llm = OpenAI(foo="bar")  # noqa: F841 — construction itself is the behavior under test
|
|
|
@ -46,3 +42,15 @@ def mock_completion() -> dict:
|
|
|
|
],
|
|
|
|
],
|
|
|
|
"usage": {"prompt_tokens": 1, "completion_tokens": 2, "total_tokens": 3},
|
|
|
|
"usage": {"prompt_tokens": 1, "completion_tokens": 2, "total_tokens": 3},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize(
    "model",
    [
        "gpt-3.5-turbo-instruct",
        "text-davinci-003",
    ],
)
def test_get_token_ids(model: str) -> None:
    """``get_token_ids`` runs without error for both completion-era model
    names (smoke test only; the token ids themselves are not checked)."""
    # Redundant trailing bare ``return`` removed — a test function's implicit
    # return is identical.
    OpenAI(model=model).get_token_ids("foo")
|
|
|
|