docs: tool call run model (#23609)

Bagatur 2024-06-27 15:02:12 -07:00 committed by GitHub
parent 75a44fe951
commit ef0593db58


@@ -116,7 +116,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
@@ -164,22 +164,14 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"outputs": [],
"source": [
"# | output: false\n",
"# | echo: false\n",
"\n",
"%pip install -qU langchain langchain_openai\n",
"%pip install -qU langchain_openai\n",
"\n",
"import os\n",
"from getpass import getpass\n",
@@ -193,18 +185,33 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 8,
"metadata": {},
"outputs": [],
"outputs": [
{
"data": {
"text/plain": [
"AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_z9fCZJPE8AmtnIiD08Q6Najb', 'function': {'arguments': '{\"a\":3,\"b\":12}', 'name': 'Multiply'}, 'type': 'function'}]}, response_metadata={'token_usage': {'completion_tokens': 18, 'prompt_tokens': 95, 'total_tokens': 113}, 'model_name': 'gpt-3.5-turbo-0125', 'system_fingerprint': None, 'finish_reason': 'tool_calls', 'logprobs': None}, id='run-7564c56a-3760-41f0-94fa-5c646bb11e60-0', tool_calls=[{'name': 'Multiply', 'args': {'a': 3, 'b': 12}, 'id': 'call_z9fCZJPE8AmtnIiD08Q6Najb'}], usage_metadata={'input_tokens': 95, 'output_tokens': 18, 'total_tokens': 113})"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"llm_with_tools = llm.bind_tools(tools)"
"llm_with_tools = llm.bind_tools(tools)\n",
"\n",
"query = \"What is 3 * 12?\"\n",
"\n",
"llm_with_tools.invoke(query)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"As we can see, even though the prompt didn't really suggest a tool call, our LLM made one since it was forced to do so. You can look at the docs for [`bind_tool`](https://api.python.langchain.com/en/latest/chat_models/langchain_openai.chat_models.base.BaseChatOpenAI.html#langchain_openai.chat_models.base.BaseChatOpenAI.bind_tools) to learn about all the ways to customize how your LLM selects tools."
"As we can see, even though the prompt didn't really suggest a tool call, our LLM made one since it was forced to do so. You can look at the docs for [bind_tools()](https://api.python.langchain.com/en/latest/chat_models/langchain_openai.chat_models.base.BaseChatOpenAI.html#langchain_openai.chat_models.base.BaseChatOpenAI.bind_tools) to learn about all the ways to customize how your LLM selects tools."
]
},
{
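The new cell output above shows the model returning a structured tool call rather than plain text. As a minimal sketch (not part of this commit), assuming the `llm_with_tools` and `query` values defined in the notebook cells above, the parsed calls can be read back from the returned message's `tool_calls` attribute:

    # Minimal sketch, not part of this commit: inspect the parsed tool calls
    # carried on the AIMessage shown in the new output above (assumes
    # `llm_with_tools` and `query` from the notebook cells in this diff).
    msg = llm_with_tools.invoke(query)
    for call in msg.tool_calls:
        # Each entry holds the tool name, parsed arguments, and call id,
        # e.g. {'name': 'Multiply', 'args': {'a': 3, 'b': 12}, 'id': '...'}
        print(call["name"], call["args"])
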
@@ -311,10 +318,24 @@
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"name": "python"
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.9"
}
},
"nbformat": 4,
"nbformat_minor": 2
"nbformat_minor": 4
}