Mirror of https://github.com/hwchase17/langchain (commit 56ac94e014)
**Description:** This PR adds a chat model integration for [Snowflake Cortex](https://docs.snowflake.com/en/user-guide/snowflake-cortex/llm-functions), which gives instant access to industry-leading large language models (LLMs) trained by researchers at companies such as Mistral, Reka, Meta, and Google, including [Snowflake Arctic](https://www.snowflake.com/en/data-cloud/arctic/), an open enterprise-grade model developed by Snowflake.

**Dependencies:** Snowflake's [snowpark](https://pypi.org/project/snowflake-snowpark-python/) library is required to use this integration.

**Twitter handle:** [@gethouseware](https://twitter.com/gethouseware)

- [x] **Add tests and docs**:
  1. Integration tests: `libs/community/tests/integration_tests/chat_models/test_snowflake.py`
  2. Unit tests: `libs/community/tests/unit_tests/chat_models/test_snowflake.py`
  3. Example notebook: `docs/docs/integrations/chat/snowflake.ipynb`
- [x] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. See contribution guidelines for more: https://python.langchain.com/docs/contributing/
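For context, here is a minimal usage sketch of the new integration. It assumes the Snowflake connection environment variables listed in the test file below are already set; the model name is illustrative (Cortex exposes several models, including `snowflake-arctic`):

```python
# Minimal sketch, assuming SNOWFLAKE_ACCOUNT, SNOWFLAKE_USERNAME,
# SNOWFLAKE_PASSWORD, SNOWFLAKE_DATABASE, SNOWFLAKE_SCHEMA,
# SNOWFLAKE_WAREHOUSE, and SNOWFLAKE_ROLE are set in the environment.
from langchain_core.messages import HumanMessage, SystemMessage

from langchain_community.chat_models import ChatSnowflakeCortex

# The model name is illustrative; any model exposed by Cortex works here.
chat = ChatSnowflakeCortex(model="snowflake-arctic")
response = chat.invoke(
    [
        SystemMessage(content="You are a friendly assistant."),
        HumanMessage(content="What is Snowflake Cortex?"),
    ]
)
print(response.content)
```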
"""Test ChatSnowflakeCortex
|
|
Note: This test must be run with the following environment variables set:
|
|
SNOWFLAKE_ACCOUNT="YOUR_SNOWFLAKE_ACCOUNT",
|
|
SNOWFLAKE_USERNAME="YOUR_SNOWFLAKE_USERNAME",
|
|
SNOWFLAKE_PASSWORD="YOUR_SNOWFLAKE_PASSWORD",
|
|
SNOWFLAKE_DATABASE="YOUR_SNOWFLAKE_DATABASE",
|
|
SNOWFLAKE_SCHEMA="YOUR_SNOWFLAKE_SCHEMA",
|
|
SNOWFLAKE_WAREHOUSE="YOUR_SNOWFLAKE_WAREHOUSE"
|
|
SNOWFLAKE_ROLE="YOUR_SNOWFLAKE_ROLE",
|
|
"""
|
|
|
|
import pytest
|
|
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
|
|
from langchain_core.outputs import ChatGeneration, LLMResult
|
|
|
|
from langchain_community.chat_models import ChatSnowflakeCortex
|
|
|
|
|
|
@pytest.fixture
|
|
def chat() -> ChatSnowflakeCortex:
|
|
return ChatSnowflakeCortex()
|
|
|
|
|
|
def test_chat_snowflake_cortex(chat: ChatSnowflakeCortex) -> None:
|
|
"""Test ChatSnowflakeCortex."""
|
|
message = HumanMessage(content="Hello")
|
|
response = chat([message])
|
|
assert isinstance(response, BaseMessage)
|
|
assert isinstance(response.content, str)
|
|
|
|
|
|
def test_chat_snowflake_cortex_system_message(chat: ChatSnowflakeCortex) -> None:
|
|
"""Test ChatSnowflakeCortex for system message"""
|
|
system_message = SystemMessage(content="You are to chat with the user.")
|
|
human_message = HumanMessage(content="Hello")
|
|
response = chat([system_message, human_message])
|
|
assert isinstance(response, BaseMessage)
|
|
assert isinstance(response.content, str)
|
|
|
|
|
|
def test_chat_snowflake_cortex_model() -> None:
|
|
"""Test ChatSnowflakeCortex handles model_name."""
|
|
chat = ChatSnowflakeCortex(
|
|
model="foo",
|
|
)
|
|
assert chat.model == "foo"
|
|
|
|
|
|
def test_chat_snowflake_cortex_generate(chat: ChatSnowflakeCortex) -> None:
|
|
"""Test ChatSnowflakeCortex with generate."""
|
|
message = HumanMessage(content="Hello")
|
|
response = chat.generate([[message], [message]])
|
|
assert isinstance(response, LLMResult)
|
|
assert len(response.generations) == 2
|
|
for generations in response.generations:
|
|
for generation in generations:
|
|
assert isinstance(generation, ChatGeneration)
|
|
assert isinstance(generation.text, str)
|
|
assert generation.text == generation.message.content
|
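To run these integration tests, the required environment variables must be set first. A minimal sketch, using the placeholder values from the docstring above and the test path from the PR description:

```python
# Sketch only: the placeholder values come from the test file's docstring,
# and the test path from the PR description. Run from the repository root
# with real credentials substituted in.
import os

import pytest

os.environ.update(
    {
        "SNOWFLAKE_ACCOUNT": "YOUR_SNOWFLAKE_ACCOUNT",
        "SNOWFLAKE_USERNAME": "YOUR_SNOWFLAKE_USERNAME",
        "SNOWFLAKE_PASSWORD": "YOUR_SNOWFLAKE_PASSWORD",
        "SNOWFLAKE_DATABASE": "YOUR_SNOWFLAKE_DATABASE",
        "SNOWFLAKE_SCHEMA": "YOUR_SNOWFLAKE_SCHEMA",
        "SNOWFLAKE_WAREHOUSE": "YOUR_SNOWFLAKE_WAREHOUSE",
        "SNOWFLAKE_ROLE": "YOUR_SNOWFLAKE_ROLE",
    }
)
pytest.main(["libs/community/tests/integration_tests/chat_models/test_snowflake.py"])
```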