From 1281fdf0f29e330c539436d25c16bdf5a3dceb4b Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Tue, 13 Jun 2023 10:52:54 -0700
Subject: [PATCH] Harrison/notebook functions (#6103)

---
 docs/modules/agents/tools.rst                 |   9 ++
 .../tools/tools_as_openai_functions.ipynb     | 138 ++++++++++++++++++
 2 files changed, 147 insertions(+)
 create mode 100644 docs/modules/agents/tools/tools_as_openai_functions.ipynb

diff --git a/docs/modules/agents/tools.rst b/docs/modules/agents/tools.rst
index e9454349..02e134a1 100644
--- a/docs/modules/agents/tools.rst
+++ b/docs/modules/agents/tools.rst
@@ -25,6 +25,15 @@ Next, we have some examples of customizing and generically working with tools
    ./tools/custom_tools.ipynb
    ./tools/multi_input_tool.ipynb
    ./tools/tool_input_validation.ipynb
+   ./tools/human_approval.ipynb
+
+Tools are also usable outside of the LangChain ecosystem! Here are examples of doing so
+
+.. toctree::
+   :maxdepth: 1
+   :glob:
+
+   ./tools/tools_as_openai_functions.ipynb
 
 In this documentation we cover generic tooling functionality (eg how to create your own)
 
diff --git a/docs/modules/agents/tools/tools_as_openai_functions.ipynb b/docs/modules/agents/tools/tools_as_openai_functions.ipynb
new file mode 100644
index 00000000..6fe92dfd
--- /dev/null
+++ b/docs/modules/agents/tools/tools_as_openai_functions.ipynb
@@ -0,0 +1,138 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "4111c9d4",
+   "metadata": {},
+   "source": [
+    "# Tools as OpenAI Functions\n",
+    "\n",
+    "This notebook goes over how to use LangChain tools as OpenAI functions."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "d65d8a60",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain.chat_models import ChatOpenAI\n",
+    "from langchain.schema import HumanMessage"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "abd8dc72",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "model = ChatOpenAI(model=\"gpt-3.5-turbo-0613\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "dce2cdb7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain.tools import MoveFileTool, format_tool_to_openai_function"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "3b3dc766",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "tools = [MoveFileTool()]\n",
+    "functions = [format_tool_to_openai_function(t) for t in tools]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "230a7939",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "message = model.predict_messages([HumanMessage(content='move file foo to bar')], functions=functions)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "c118c940",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "AIMessage(content='', additional_kwargs={'function_call': {'name': 'move_file', 'arguments': '{\\n \"source_path\": \"foo\",\\n \"destination_path\": \"bar\"\\n}'}}, example=False)"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "message"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "id": "d618e3d8",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "{'name': 'move_file',\n",
+       " 'arguments': '{\\n \"source_path\": \"foo\",\\n \"destination_path\": \"bar\"\\n}'}"
+      ]
+     },
+     "execution_count": 8,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "message.additional_kwargs['function_call']"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "751da79f",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.1"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}