From b9ad214801cfcccebccb8605ba0c4980fae2c425 Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Fri, 27 Jan 2023 07:10:26 -0800
Subject: [PATCH] add docs for loading from hub (#763)

---
 .../agents/examples/load_from_hub.ipynb    |  95 +++++++++++
 docs/modules/agents/how_to_guides.rst      |   2 +
 docs/modules/chains/generic/from_hub.ipynb | 157 ++++++++++++++++++
 docs/modules/chains/how_to_guides.rst      |   4 +
 4 files changed, 258 insertions(+)
 create mode 100644 docs/modules/agents/examples/load_from_hub.ipynb
 create mode 100644 docs/modules/chains/generic/from_hub.ipynb

diff --git a/docs/modules/agents/examples/load_from_hub.ipynb b/docs/modules/agents/examples/load_from_hub.ipynb
new file mode 100644
index 00000000..7b5e0b1b
--- /dev/null
+++ b/docs/modules/agents/examples/load_from_hub.ipynb
@@ -0,0 +1,95 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "991b1cc1",
+ "metadata": {},
+ "source": [
+ "# Loading from LangChainHub\n",
+ "\n",
+ "This notebook covers how to load agents from [LangChainHub](https://github.com/hwchase17/langchain-hub)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "bd4450a2",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "\n",
+ "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n",
+ "\u001b[32;1m\u001b[1;3m Yes.\n",
+ "Follow up: Who is the reigning men's U.S. Open champion?\u001b[0m\n",
+ "Intermediate answer: \u001b[36;1m\u001b[1;3m2016 · SUI · Stan Wawrinka ; 2017 · ESP · Rafael Nadal ; 2018 · SRB · Novak Djokovic ; 2019 · ESP · Rafael Nadal.\u001b[0m\n",
+ "\u001b[32;1m\u001b[1;3mSo the reigning men's U.S. Open champion is Rafael Nadal.\n",
+ "Follow up: What is Rafael Nadal's hometown?\u001b[0m\n",
+ "Intermediate answer: \u001b[36;1m\u001b[1;3mIn 2016, he once again showed his deep ties to Mallorca and opened the Rafa Nadal Academy in his hometown of Manacor.\u001b[0m\n",
+ "\u001b[32;1m\u001b[1;3mSo the final answer is: Manacor, Mallorca, Spain.\u001b[0m\n",
+ "\n",
+ "\u001b[1m> Finished chain.\u001b[0m\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'Manacor, Mallorca, Spain.'"
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from langchain import OpenAI, SerpAPIWrapper\n",
+ "from langchain.agents import initialize_agent, Tool\n",
+ "\n",
+ "llm = OpenAI(temperature=0)\n",
+ "search = SerpAPIWrapper()\n",
+ "tools = [\n",
+ "    Tool(\n",
+ "        name=\"Intermediate Answer\",\n",
+ "        func=search.run\n",
+ "    )\n",
+ "]\n",
+ "\n",
+ "self_ask_with_search = initialize_agent(tools, llm, agent_path=\"lc://agents/self-ask-with-search/agent.json\", verbose=True)\n",
+ "self_ask_with_search.run(\"What is the hometown of the reigning men's U.S. Open champion?\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e679f7b6",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.9"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/modules/agents/how_to_guides.rst b/docs/modules/agents/how_to_guides.rst
index b891353a..7443bbb8 100644
--- a/docs/modules/agents/how_to_guides.rst
+++ b/docs/modules/agents/how_to_guides.rst
@@ -3,6 +3,8 @@ How-To Guides
 
 The first category of how-to guides here cover specific parts of working with agents.
 
+`Load From Hub <./examples/load_from_hub.html>`_: This notebook covers how to load agents from `LangChainHub <https://github.com/hwchase17/langchain-hub>`_.
+
 `Custom Tools <./examples/custom_tools.html>`_: How to create custom tools that an agent can use.
 
 `Intermediate Steps <./examples/intermediate_steps.html>`_: How to access and use intermediate steps to get more visibility into the internals of an agent.
diff --git a/docs/modules/chains/generic/from_hub.ipynb b/docs/modules/chains/generic/from_hub.ipynb
new file mode 100644
index 00000000..d3fcbedd
--- /dev/null
+++ b/docs/modules/chains/generic/from_hub.ipynb
@@ -0,0 +1,157 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "25c90e9e",
+ "metadata": {},
+ "source": [
+ "# Loading from LangChainHub\n",
+ "\n",
+ "This notebook covers how to load chains from [LangChainHub](https://github.com/hwchase17/langchain-hub)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "8b54479e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from langchain.chains import load_chain\n",
+ "\n",
+ "chain = load_chain(\"lc://chains/llm-math/chain.json\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "4828f31f",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "\n",
+ "\u001b[1m> Entering new LLMMathChain chain...\u001b[0m\n",
+ "whats 2 raised to .12\u001b[32;1m\u001b[1;3m\n",
+ "Answer: 1.0791812460476249\u001b[0m\n",
+ "\u001b[1m> Finished chain.\u001b[0m\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'Answer: 1.0791812460476249'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "chain.run(\"whats 2 raised to .12\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "8db72cda",
+ "metadata": {},
+ "source": [
+ "Sometimes chains will require extra arguments that were not serialized with the chain. For example, a chain that does question answering over a vector database will require a vector database."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "aab39528",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from langchain.embeddings.openai import OpenAIEmbeddings\n",
+ "from langchain.vectorstores.faiss import FAISS\n",
+ "from langchain.text_splitter import CharacterTextSplitter\n",
+ "from langchain import OpenAI, VectorDBQA"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "id": "16a85d5e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "with open('../../state_of_the_union.txt') as f:\n",
+ "    state_of_the_union = f.read()\n",
+ "text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)\n",
+ "texts = text_splitter.split_text(state_of_the_union)\n",
+ "\n",
+ "embeddings = OpenAIEmbeddings()\n",
+ "vectorstore = FAISS.from_texts(texts, embeddings)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "id": "6a82e91e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "chain = load_chain(\"lc://chains/vector-db-qa/stuff/chain.json\", vectorstore=vectorstore)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "efe9b25b",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "\" The president said that Jackson is one of the nation's top legal minds, a former top litigator in private practice, a former federal public defender, and from a family of public school educators and police officers, and that she has received a broad range of support from the Fraternal Order of Police to former judges appointed by Democrats and Republicans.\""
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "query = \"What did the president say about Ketanji Brown Jackson\"\n",
+ "chain.run(query)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f910a32f",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.9"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/modules/chains/how_to_guides.rst b/docs/modules/chains/how_to_guides.rst
index 7090341f..2b2d79e8 100644
--- a/docs/modules/chains/how_to_guides.rst
+++ b/docs/modules/chains/how_to_guides.rst
@@ -18,3 +18,7 @@ They are broken up into three categories:
 ./generic_how_to.rst
 ./combine_docs_how_to.rst
 ./utility_how_to.rst
+
+In addition to different types of chains, we also have the following how-to guides for working with chains in general:
+
+`Load From Hub <./generic/from_hub.html>`_: This notebook covers how to load chains from `LangChainHub <https://github.com/hwchase17/langchain-hub>`_.
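
Note (not part of the patch above): the `lc://` paths used in these notebooks resolve to JSON files in the langchain-hub repository, and `load_chain` can also read the same serialization format from a plain local file path. The following is a minimal sketch of that local round trip, assuming the installed langchain version already exposes `Chain.save` and local-path loading, and that OPENAI_API_KEY is set; `llm_math_chain.json` is just an illustrative filename.

# Sketch only: assumes Chain.save and local-path support in load_chain are
# available in this langchain version; llm_math_chain.json is illustrative.
from langchain import OpenAI
from langchain.chains import LLMMathChain, load_chain

llm = OpenAI(temperature=0)

# Build a chain in code and serialize it to disk in the same JSON format
# that LangChainHub stores under chains/.
llm_math = LLMMathChain(llm=llm, verbose=True)
llm_math.save("llm_math_chain.json")

# Reload it from the local file instead of an lc:// hub path.
reloaded = load_chain("llm_math_chain.json")
print(reloaded.run("whats 2 raised to .12"))

Chains that depend on non-serializable components, such as the vector-db-qa example in the patch, would still receive them as keyword arguments (for example `vectorstore=...`) when calling `load_chain`.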