From bb597b1286e55e794bf0b82bd4a6e11fc9eb4450 Mon Sep 17 00:00:00 2001 From: Igor Drozdov Date: Fri, 5 Jul 2024 15:19:41 +0200 Subject: [PATCH] feat(community): add bind_tools function for ChatLiteLLM (#23823) It's a follow-up to https://github.com/langchain-ai/langchain/pull/23765 Now the tools can be bound by calling `bind_tools` ```python from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.utils.function_calling import convert_to_openai_tool from langchain_community.chat_models import ChatLiteLLM class GetWeather(BaseModel): '''Get the current weather in a given location''' location: str = Field(..., description="The city and state, e.g. San Francisco, CA") class GetPopulation(BaseModel): '''Get the current population in a given location''' location: str = Field(..., description="The city and state, e.g. San Francisco, CA") prompt = "Which city is hotter today and which is bigger: LA or NY?" # tools = [convert_to_openai_tool(GetWeather), convert_to_openai_tool(GetPopulation)] tools = [GetWeather, GetPopulation] llm = ChatLiteLLM(model="claude-3-sonnet-20240229").bind_tools(tools) ai_msg = llm.invoke(prompt) print(ai_msg.tool_calls) ``` If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, ccurme, vbarda, hwchase17. 
Co-authored-by: Igor Drozdov --- .../chat_models/litellm.py | 33 ++++++++++++++++++- 1 file changed, 32 insertions(+), 1 deletion(-) diff --git a/libs/community/langchain_community/chat_models/litellm.py b/libs/community/langchain_community/chat_models/litellm.py index 18ad918914..e57a5b0725 100644 --- a/libs/community/langchain_community/chat_models/litellm.py +++ b/libs/community/langchain_community/chat_models/litellm.py @@ -12,6 +12,7 @@ from typing import ( List, Mapping, Optional, + Sequence, Tuple, Type, Union, @@ -21,6 +22,7 @@ from langchain_core.callbacks import ( AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, ) +from langchain_core.language_models import LanguageModelInput from langchain_core.language_models.chat_models import ( BaseChatModel, agenerate_from_stream, @@ -46,8 +48,11 @@ from langchain_core.outputs import ( ChatGenerationChunk, ChatResult, ) -from langchain_core.pydantic_v1 import Field, root_validator +from langchain_core.pydantic_v1 import BaseModel, Field, root_validator +from langchain_core.runnables import Runnable +from langchain_core.tools import BaseTool from langchain_core.utils import get_from_dict_or_env +from langchain_core.utils.function_calling import convert_to_openai_tool logger = logging.getLogger(__name__) @@ -411,6 +416,32 @@ class ChatLiteLLM(BaseChatModel): ) return self._create_chat_result(response) + def bind_tools( + self, + tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]], + **kwargs: Any, + ) -> Runnable[LanguageModelInput, BaseMessage]: + """Bind tool-like objects to this chat model. + + LiteLLM expects tools argument in OpenAI format. + + Args: + tools: A list of tool definitions to bind to this chat model. + Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic + models, callables, and BaseTools will be automatically converted to + their schema dictionary representation. + tool_choice: Which tool to require the model to call. 
+            Must be the name of the single provided function or +            "auto" to automatically determine which function to call +            (if any), or a dict of the form: +            {"type": "function", "function": {"name": <>}}. +            **kwargs: Any additional parameters to pass to the +            :class:`~langchain_core.runnables.Runnable` constructor. +        """ + +        formatted_tools = [convert_to_openai_tool(tool) for tool in tools] +        return super().bind(tools=formatted_tools, **kwargs) +     @property     def _identifying_params(self) -> Dict[str, Any]:         """Get the identifying parameters."""