langchain/libs/community/tests/integration_tests/llms/test_cohere.py
ccurme 481d3855dc
patch: remove usage of llm, chat model __call__ (#20788)
- `llm(prompt)` -> `llm.invoke(prompt)`
- `llm(prompt=prompt)` -> `llm.invoke(prompt)` (same with `messages=`)
- `llm(prompt, callbacks=callbacks)` -> `llm.invoke(prompt, config={"callbacks": callbacks})`
- `llm(prompt, **kwargs)` -> `llm.invoke(prompt, **kwargs)`
2024-04-24 19:39:23 -04:00
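A minimal sketch of the before/after call patterns this patch describes, not taken from the test file below: the prompt text and the `StdOutCallbackHandler` are illustrative choices, and a real `COHERE_API_KEY` would be needed to actually run it.

# Illustrative sketch of the __call__ -> invoke migration (hypothetical usage).
from langchain_core.callbacks import StdOutCallbackHandler

from langchain_community.llms.cohere import Cohere

llm = Cohere(max_tokens=10)  # assumes COHERE_API_KEY is set in the environment

# Before: calling the LLM object directly.
#     output = llm("Say foo:")
#     output = llm("Say foo:", callbacks=[StdOutCallbackHandler()])

# After: using .invoke(), with callbacks passed through the config dict.
output = llm.invoke("Say foo:")
output = llm.invoke("Say foo:", config={"callbacks": [StdOutCallbackHandler()]})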


"""Test Cohere API wrapper."""
from pathlib import Path
from langchain_core.pydantic_v1 import SecretStr
from pytest import MonkeyPatch
from langchain_community.llms.cohere import Cohere
from langchain_community.llms.loading import load_llm
from tests.integration_tests.llms.utils import assert_llm_equality
def test_cohere_call() -> None:
"""Test valid call to cohere."""
llm = Cohere(max_tokens=10)
output = llm.invoke("Say foo:")
assert isinstance(output, str)
def test_cohere_api_key(monkeypatch: MonkeyPatch) -> None:
"""Test that cohere api key is a secret key."""
# test initialization from init
assert isinstance(Cohere(cohere_api_key="1").cohere_api_key, SecretStr)
# test initialization from env variable
monkeypatch.setenv("COHERE_API_KEY", "secret-api-key")
assert isinstance(Cohere().cohere_api_key, SecretStr)
def test_saving_loading_llm(tmp_path: Path) -> None:
"""Test saving/loading an Cohere LLM."""
llm = Cohere(max_tokens=10)
llm.save(file_path=tmp_path / "cohere.yaml")
loaded_llm = load_llm(tmp_path / "cohere.yaml")
assert_llm_equality(llm, loaded_llm)