core[patch]: support bedrock converse -> openai tool (#27754)

Bagatur 2024-10-30 12:20:39 -07:00 committed by GitHub
parent 3dfdb3e6fb
commit 94ea950c6c
2 changed files with 39 additions and 2 deletions


@@ -341,8 +341,8 @@ def convert_to_openai_function(
             A dictionary, Pydantic BaseModel class, TypedDict class, a LangChain
             Tool object, or a Python function. If a dictionary is passed in, it is
             assumed to already be a valid OpenAI function, a JSON schema with
-            top-level 'title' and 'description' keys specified, or an Anthropic format
-            tool.
+            top-level 'title' and 'description' keys specified, an Anthropic format
+            tool, or an Amazon Bedrock Converse format tool.
         strict:
             If True, model output is guaranteed to exactly match the JSON Schema
             provided in the function definition. If None, ``strict`` argument will not
@@ -362,6 +362,10 @@ def convert_to_openai_function(
     .. versionchanged:: 0.3.13

         Support for Anthropic format tools added.

+    .. versionchanged:: 0.3.14
+
+        Support for Amazon Bedrock Converse format tools added.
+
     """
     from langchain_core.tools import BaseTool
@ -389,6 +393,13 @@ def convert_to_openai_function(
"description": function["description"], "description": function["description"],
"parameters": function["input_schema"], "parameters": function["input_schema"],
} }
# an Amazon Bedrock Converse format tool
elif isinstance(function, dict) and "toolSpec" in function:
oai_function = {
"name": function["toolSpec"]["name"],
"description": function["toolSpec"]["description"],
"parameters": function["toolSpec"]["inputSchema"]["json"],
}
elif isinstance(function, type) and is_basemodel_subclass(function): elif isinstance(function, type) and is_basemodel_subclass(function):
oai_function = cast(dict, convert_pydantic_to_openai_function(function)) oai_function = cast(dict, convert_pydantic_to_openai_function(function))
elif is_typeddict(function): elif is_typeddict(function):
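
For context, a minimal sketch (not part of this commit) of how the new branch is exercised: any dict carrying a top-level "toolSpec" key is now flattened into an OpenAI-style function definition. The import path is the usual langchain_core location of convert_to_openai_function; the get_weather tool below is an illustrative assumption.

# Sketch, assuming langchain-core >= 0.3.14; the example tool is hypothetical.
from langchain_core.utils.function_calling import convert_to_openai_function

bedrock_converse_tool = {
    "toolSpec": {
        "name": "get_weather",
        "description": "Look up the current weather for a city.",
        "inputSchema": {
            "json": {
                "type": "object",
                "properties": {"city": {"type": "string", "description": "City name"}},
                "required": ["city"],
            }
        },
    }
}

oai_function = convert_to_openai_function(bedrock_converse_tool)
# Per the elif branch above, the result should have the shape:
# {"name": "get_weather",
#  "description": "Look up the current weather for a city.",
#  "parameters": {"type": "object", "properties": {...}, "required": ["city"]}}
print(oai_function["name"], oai_function["parameters"]["required"])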


@@ -230,6 +230,30 @@ def anthropic_tool() -> dict:
     }


+@pytest.fixture()
+def bedrock_converse_tool() -> dict:
+    return {
+        "toolSpec": {
+            "name": "dummy_function",
+            "description": "dummy function",
+            "inputSchema": {
+                "json": {
+                    "type": "object",
+                    "properties": {
+                        "arg1": {"description": "foo", "type": "integer"},
+                        "arg2": {
+                            "description": "one of 'bar', 'baz'",
+                            "enum": ["bar", "baz"],
+                            "type": "string",
+                        },
+                    },
+                    "required": ["arg1", "arg2"],
+                }
+            },
+        }
+    }
+
+
 class Dummy:
     def dummy_function(self, arg1: int, arg2: Literal["bar", "baz"]) -> None:
         """dummy function
@ -258,6 +282,7 @@ def test_convert_to_openai_function(
dummy_tool: BaseTool, dummy_tool: BaseTool,
json_schema: dict, json_schema: dict,
anthropic_tool: dict, anthropic_tool: dict,
bedrock_converse_tool: dict,
annotated_function: Callable, annotated_function: Callable,
dummy_pydantic: type[BaseModel], dummy_pydantic: type[BaseModel],
runnable: Runnable, runnable: Runnable,
@@ -290,6 +315,7 @@ def test_convert_to_openai_function(
         dummy_tool,
         json_schema,
         anthropic_tool,
+        bedrock_converse_tool,
         expected,
         Dummy.dummy_function,
         DummyWithClassMethod.dummy_function,
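
The parametrized test converts each of these inputs and compares the result against a shared expected dict defined outside this diff's context. Based on the bedrock_converse_tool fixture above, that expected OpenAI function definition would presumably look like the following sketch (reconstructed here for illustration, not shown in the commit):

# Sketch of the shared `expected` dict, reconstructed from the fixture above;
# the actual definition is outside the lines shown in this diff.
expected = {
    "name": "dummy_function",
    "description": "dummy function",
    "parameters": {
        "type": "object",
        "properties": {
            "arg1": {"description": "foo", "type": "integer"},
            "arg2": {
                "description": "one of 'bar', 'baz'",
                "enum": ["bar", "baz"],
                "type": "string",
            },
        },
        "required": ["arg1", "arg2"],
    },
}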