core,openai,groq,fw[patch]: deprecate bind_functions, update chat model api ref (#26584)
pull/26591/head
Bagatur 5 days ago committed by GitHub
parent 7c05f71e0f
commit e1d113ea84

@@ -15,7 +15,7 @@
     :member-order: groupwise
     :show-inheritance: True
     :special-members: __call__
-    :exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, with_listeners, with_alisteners, with_config, with_fallbacks, with_types, with_retry, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, bind, assign, as_tool, get_config_jsonschema, get_input_jsonschema, get_output_jsonschema, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, to_json, model_extra, model_fields_set, model_json_schema
+    :exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, assign, as_tool, get_config_jsonschema, get_input_jsonschema, get_output_jsonschema, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, to_json, model_extra, model_fields_set, model_json_schema, predict, apredict, predict_messages, apredict_messages, generate, generate_prompt, agenerate, agenerate_prompt, call_as_llm
     .. NOTE:: {{objname}} implements the standard :py:class:`Runnable Interface <langchain_core.runnables.base.Runnable>`. 🏃

@@ -333,9 +333,26 @@ def deprecated(
             old_doc = ""

         # Modify the docstring to include a deprecation notice.
+        if (
+            _alternative
+            and _alternative.split(".")[-1].lower() == _alternative.split(".")[-1]
+        ):
+            _alternative = f":meth:`~{_alternative}`"
+        elif _alternative:
+            _alternative = f":class:`~{_alternative}`"
+
+        if (
+            _alternative_import
+            and _alternative_import.split(".")[-1].lower()
+            == _alternative_import.split(".")[-1]
+        ):
+            _alternative_import = f":meth:`~{_alternative_import}`"
+        elif _alternative_import:
+            _alternative_import = f":class:`~{_alternative_import}`"
+
         components = [
             _message,
-            f"Use ``{_alternative}`` instead." if _alternative else "",
+            f"Use {_alternative} instead." if _alternative else "",
             f"Use ``{_alternative_import}`` instead." if _alternative_import else "",
             _addendum,
         ]
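
The branch added above decides how the deprecation notice cross-references the replacement: a dotted path whose last segment is all-lowercase is treated as a method (:meth:), anything else as a class (:class:). A minimal standalone sketch of that check; format_alternative is a hypothetical helper name used only for illustration:

    # Sketch of the :meth:/:class: selection logic shown in the hunk above.
    def format_alternative(path: str) -> str:
        last = path.split(".")[-1]
        if last.lower() == last:  # e.g. "bind_tools" -> treated as a method
            return f":meth:`~{path}`"
        return f":class:`~{path}`"  # e.g. "ChatOpenAI" -> treated as a class

    print(format_alternative("langchain_groq.chat_models.ChatGroq.bind_tools"))
    # :meth:`~langchain_groq.chat_models.ChatGroq.bind_tools`
    print(format_alternative("langchain_core.runnables.base.Runnable"))
    # :class:`~langchain_core.runnables.base.Runnable`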

@@ -24,6 +24,7 @@ from typing import (
 )

 from fireworks.client import AsyncFireworks, Fireworks  # type: ignore
+from langchain_core._api import deprecated
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
@@ -626,6 +627,11 @@ class ChatFireworks(BaseChatModel):
         """Return type of chat model."""
         return "fireworks-chat"

+    @deprecated(
+        since="0.2.1",
+        alternative="langchain_fireworks.chat_models.ChatFireworks.bind_tools",
+        removal="0.3.0",
+    )
     def bind_functions(
         self,
         functions: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
@@ -705,8 +711,8 @@
                 with the option to not call any function, "any" to enforce that some
                 function is called, or a dict of the form:
                 {"type": "function", "function": {"name": <<tool_name>>}}.
-            **kwargs: Any additional parameters to pass to the
-                :class:`~langchain.runnable.Runnable` constructor.
+            **kwargs: Any additional parameters to pass to
+                :meth:`~langchain_fireworks.chat_models.ChatFireworks.bind`
         """
         formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
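
For callers of langchain-fireworks, the decorator and docstring changes above amount to switching from bind_functions to bind_tools. A hedged migration sketch, assuming langchain-fireworks >= 0.2.1 and FIREWORKS_API_KEY set; the model name is only an example and get_weather is a stand-in tool:

    from langchain_fireworks import ChatFireworks


    def get_weather(city: str) -> str:
        """Get the current weather for a city."""
        ...


    llm = ChatFireworks(model="accounts/fireworks/models/firefunction-v2")

    # Before (now emits a deprecation warning pointing at bind_tools):
    # llm_with_functions = llm.bind_functions([get_weather])

    # After: bind_tools accepts the same definitions; extra kwargs are
    # forwarded to ChatFireworks.bind(), as the updated docstring notes.
    llm_with_tools = llm.bind_tools([get_weather], tool_choice="auto")
    print(llm_with_tools.invoke("What's the weather in Paris?").tool_calls)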

@@ -23,6 +23,7 @@ from typing import (
     cast,
 )

+from langchain_core._api import deprecated
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
@@ -650,6 +651,11 @@ class ChatGroq(BaseChatModel):
             combined["system_fingerprint"] = system_fingerprint
         return combined

+    @deprecated(
+        since="0.2.1",
+        alternative="langchain_groq.chat_models.ChatGroq.bind_tools",
+        removal="0.3.0",
+    )
     def bind_functions(
         self,
         functions: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
@@ -674,8 +680,8 @@
                 Must be the name of the single provided function or
                 "auto" to automatically determine which function to call
                 (if any).
-            **kwargs: Any additional parameters to pass to the
-                :class:`~langchain.runnable.Runnable` constructor.
+            **kwargs: Any additional parameters to pass to
+                :meth:`~langchain_groq.chat_models.ChatGroq.bind`.
         """
         formatted_functions = [convert_to_openai_function(fn) for fn in functions]
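
The groq changes mirror the fireworks ones. From the caller's side, the @deprecated decorator means ChatGroq.bind_functions now emits a LangChainDeprecationWarning pointing at ChatGroq.bind_tools. A hedged sketch of observing that, assuming langchain-groq >= 0.2.1 and GROQ_API_KEY set; the model name is only an example, get_weather is a stand-in tool, and the quoted warning text is approximate:

    import warnings

    from langchain_groq import ChatGroq


    def get_weather(city: str) -> str:
        """Get the current weather for a city."""
        ...


    llm = ChatGroq(model="llama-3.1-8b-instant")

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        llm.bind_functions([get_weather])  # deprecated since 0.2.1

    # Roughly: "bind_functions was deprecated in 0.2.1 and will be removed in
    # 0.3.0. Use :meth:`~langchain_groq.chat_models.ChatGroq.bind_tools` instead."
    print(caught[0].message)

    # Non-deprecated equivalent:
    llm_with_tools = llm.bind_tools([get_weather])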

@@ -33,6 +33,7 @@ from urllib.parse import urlparse

 import openai
 import tiktoken
+from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
@@ -966,6 +967,11 @@ class BaseChatOpenAI(BaseChatModel):
             num_tokens += 3
         return num_tokens

+    @deprecated(
+        since="0.2.1",
+        alternative="langchain_openai.chat_models.base.ChatOpenAI.bind_tools",
+        removal="0.3.0",
+    )
     def bind_functions(
         self,
         functions: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
@@ -1033,22 +1039,18 @@ class BaseChatOpenAI(BaseChatModel):
         Assumes model is compatible with OpenAI tool-calling API.

-        .. versionchanged:: 0.1.21
-
-            Support for ``strict`` argument added.
-
         Args:
             tools: A list of tool definitions to bind to this chat model.
                 Supports any tool definition handled by
                 :meth:`langchain_core.utils.function_calling.convert_to_openai_tool`.
-            tool_choice: Which tool to require the model to call.
-                Options are:
+            tool_choice: Which tool to require the model to call. Options are:
                 - str of the form ``"<<tool_name>>"``: calls <<tool_name>> tool.
                 - ``"auto"``: automatically selects a tool (including no tool).
                 - ``"none"``: does not call a tool.
                 - ``"any"`` or ``"required"`` or ``True``: force at least one tool to be called.
                 - dict of the form ``{"type": "function", "function": {"name": <<tool_name>>}}``: calls <<tool_name>> tool.
                 - ``False`` or ``None``: no effect, default OpenAI behavior.
             strict: If True, model output is guaranteed to exactly match the JSON Schema
                 provided in the tool definition. If True, the input schema will be
                 validated according to
@@ -1056,11 +1058,13 @@ class BaseChatOpenAI(BaseChatModel):
                 If False, input schema will not be validated and model output will not
                 be validated.
                 If None, ``strict`` argument will not be passed to the model.
+            kwargs: Any additional parameters are passed directly to
+                :meth:`~langchain_openai.chat_models.base.ChatOpenAI.bind`.

-        .. versionadded:: 0.1.21
+        .. versionchanged:: 0.1.21

-        kwargs: Any additional parameters are passed directly to
-            ``self.bind(**kwargs)``.
+            Support for ``strict`` argument added.
         """  # noqa: E501
         formatted_tools = [
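
The openai docstring hunks above document the tool_choice options, the strict flag, and that extra kwargs are forwarded to ChatOpenAI.bind(). A hedged usage sketch, assuming langchain-openai >= 0.1.21 and OPENAI_API_KEY set; the model name is only an example (strict=True needs a structured-outputs-capable model) and get_weather is a stand-in tool:

    from langchain_openai import ChatOpenAI


    def get_weather(city: str) -> str:
        """Get the current weather for a city."""
        ...


    llm = ChatOpenAI(model="gpt-4o-2024-08-06")

    llm_with_tools = llm.bind_tools(
        [get_weather],
        tool_choice="required",  # or "auto", "none", a tool name, or a dict form
        strict=True,  # validate tool arguments against the tool's JSON Schema
    )

    ai_msg = llm_with_tools.invoke("What's the weather in Berlin?")
    print(ai_msg.tool_calls)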
