"""Test ChatAnthropicMessages chat model."""
from langchain_core.prompts import ChatPromptTemplate
from langchain_anthropic.chat_models import ChatAnthropicMessages
def test_stream() -> None:
    """Stream a single prompt and check every emitted chunk has string content."""
    chat = ChatAnthropicMessages(model_name="claude-instant-1.2")

    for chunk in chat.stream("I'm Pickle Rick"):
        # Each streamed chunk should expose its text as a plain string.
        assert isinstance(chunk.content, str)


async def test_astream() -> None:
    """Async-stream a single prompt and check every emitted chunk has string content."""
    chat = ChatAnthropicMessages(model_name="claude-instant-1.2")

    async for chunk in chat.astream("I'm Pickle Rick"):
        # Each streamed chunk should expose its text as a plain string.
        assert isinstance(chunk.content, str)


async def test_abatch() -> None:
    """Test batch tokens from ChatAnthropicMessages."""
    # NOTE: docstring previously said "streaming" — this test exercises abatch,
    # matching the wording used by test_abatch_tags below.
    llm = ChatAnthropicMessages(model_name="claude-instant-1.2")

    result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
    for token in result:
        # Every batched response should carry string content.
        assert isinstance(token.content, str)


async def test_abatch_tags() -> None:
    """Async-batch two prompts with a run tag and verify string outputs."""
    chat = ChatAnthropicMessages(model_name="claude-instant-1.2")

    responses = await chat.abatch(
        ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
    )
    for response in responses:
        # Every batched response should carry string content.
        assert isinstance(response.content, str)


def test_batch() -> None:
    """Batch two prompts synchronously and verify string outputs."""
    chat = ChatAnthropicMessages(model_name="claude-instant-1.2")

    responses = chat.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
    for response in responses:
        # Every batched response should carry string content.
        assert isinstance(response.content, str)


async def test_ainvoke() -> None:
    """Async-invoke a single prompt with a run tag and verify string content."""
    chat = ChatAnthropicMessages(model_name="claude-instant-1.2")

    response = await chat.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
    assert isinstance(response.content, str)


def test_invoke() -> None:
    """Test invoke tokens from ChatAnthropicMessages."""
    llm = ChatAnthropicMessages(model_name="claude-instant-1.2")

    # Use a dict literal for the config, matching the style of the other
    # tests in this module (previously: config=dict(tags=["foo"])).
    result = llm.invoke("I'm Pickle Rick", config={"tags": ["foo"]})
    assert isinstance(result.content, str)


def test_system_invoke() -> None:
    """Invoke a prompt-model chain that includes a system message."""
    chat = ChatAnthropicMessages(model_name="claude-instant-1.2")

    # Build the message list separately, then turn it into a prompt template.
    messages = [
        (
            "system",
            "You are an expert cartographer. If asked, you are a cartographer. "
            "STAY IN CHARACTER",
        ),
        ("human", "Are you a mathematician?"),
    ]
    prompt = ChatPromptTemplate.from_messages(messages)

    chain = prompt | chat
    response = chain.invoke({})
    assert isinstance(response.content, str)