standard-tests[patch]: add anthropic format integration test (#23717)

Bagatur 2024-07-01 11:06:04 -04:00 committed by GitHub
parent 4b9517db85
commit 389a568f9a
3 changed files with 70 additions and 1 deletion


@@ -20,3 +20,7 @@ class TestAnthropicStandard(ChatModelIntegrationTests):
     @property
     def supports_image_inputs(self) -> bool:
         return True
+
+    @property
+    def supports_anthropic_inputs(self) -> bool:
+        return True


@@ -1,6 +1,6 @@
 import base64
 import json
-from typing import Optional
+from typing import List, Optional
 
 import httpx
 import pytest
@@ -10,6 +10,7 @@ from langchain_core.messages import (
     AIMessageChunk,
     BaseMessageChunk,
     HumanMessage,
+    SystemMessage,
     ToolMessage,
 )
 from langchain_core.pydantic_v1 import BaseModel, Field
@@ -283,3 +284,63 @@ class ChatModelIntegrationTests(ChatModelTests):
             ],
         )
         model.invoke([message])
+
+    def test_anthropic_inputs(self, model: BaseChatModel) -> None:
+        if not self.supports_anthropic_inputs:
+            return
+
+        class color_picker(BaseModel):
+            """Input your fav color and get a random fact about it."""
+
+            fav_color: str
+
+        human_content: List[dict] = [
+            {
+                "type": "text",
+                "text": "what's your favorite color in this image",
+            },
+        ]
+        if self.supports_image_inputs:
+            image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"
+            image_data = base64.b64encode(httpx.get(image_url).content).decode("utf-8")
+            human_content.append(
+                {
+                    "type": "image",
+                    "source": {
+                        "type": "base64",
+                        "media_type": "image/jpeg",
+                        "data": image_data,
+                    },
+                }
+            )
+        messages = [
+            SystemMessage("you're a good assistant"),
+            HumanMessage(human_content),  # type: ignore[arg-type]
+            AIMessage(
+                [
+                    {"type": "text", "text": "Hmm let me think about that"},
+                    {
+                        "type": "tool_use",
+                        "input": {"fav_color": "green"},
+                        "id": "foo",
+                        "name": "color_picker",
+                    },
+                ]
+            ),
+            HumanMessage(
+                [
+                    {
+                        "type": "tool_result",
+                        "tool_use_id": "foo",
+                        "content": [
+                            {
+                                "type": "text",
+                                "text": "green is a great pick! that's my sister's favorite color",  # noqa: E501
+                            }
+                        ],
+                    },
+                    {"type": "text", "text": "what's my sister's favorite color"},
+                ]
+            ),
+        ]
+        model.bind_tools([color_picker]).invoke(messages)

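The new test feeds the model Anthropic's native content-block format: raw `tool_use` and `tool_result` dicts inside message content. For comparison only (not part of this commit), the same exchange in LangChain's provider-agnostic form would use `tool_calls` on the AIMessage plus a ToolMessage; a hedged sketch, assuming langchain_core's standard message fields:

from langchain_core.messages import AIMessage, HumanMessage, ToolMessage

# Provider-agnostic rendering of the tool exchange used in test_anthropic_inputs.
generic_messages = [
    HumanMessage("what's your favorite color in this image"),
    AIMessage(
        "Hmm let me think about that",
        tool_calls=[
            {"name": "color_picker", "args": {"fav_color": "green"}, "id": "foo"}
        ],
    ),
    ToolMessage(
        "green is a great pick! that's my sister's favorite color",
        tool_call_id="foo",
    ),
    HumanMessage("what's my sister's favorite color"),
]
# Any tool-calling model should accept generic_messages; the raw tool_use /
# tool_result blocks in the test above are what supports_anthropic_inputs gates.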

@@ -70,6 +70,10 @@ class ChatModelTests(ABC):
     def returns_usage_metadata(self) -> bool:
         return True
 
+    @property
+    def supports_anthropic_inputs(self) -> bool:
+        return False
+
 
 class ChatModelUnitTests(ChatModelTests):
     @property
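
Because the base-class flag defaults to False, existing integrations are unaffected; a provider's test suite opts in by overriding `supports_anthropic_inputs`, the same way TestAnthropicStandard does in the first file above. A minimal sketch of such an opt-in (the `MyChatModel` class, its import path, and the constructor kwargs are hypothetical; the `chat_model_class` / `chat_model_params` hooks are assumed from the existing standard-tests base class):

from typing import Type

from langchain_core.language_models import BaseChatModel
from langchain_standard_tests.integration_tests import ChatModelIntegrationTests

from my_provider import MyChatModel  # hypothetical provider package


class TestMyChatModelStandard(ChatModelIntegrationTests):
    @property
    def chat_model_class(self) -> Type[BaseChatModel]:
        return MyChatModel

    @property
    def chat_model_params(self) -> dict:
        # Assumed constructor kwargs; adjust to the provider's API.
        return {"model": "my-model-name"}

    @property
    def supports_anthropic_inputs(self) -> bool:
        # Opt in to the test_anthropic_inputs check added in this commit.
        return True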