From 922193475ad9f59fb7041a53cf30c242f4711f35 Mon Sep 17 00:00:00 2001
From: Bagatur <22008038+baskaryan@users.noreply.github.com>
Date: Tue, 24 Oct 2023 11:52:47 -0400
Subject: [PATCH] Docs: Add LCEL to chains/foundational/transform (#12212)

---
 .../chains/foundational/transformation.ipynb | 101 ++++++++++++++----
 1 file changed, 78 insertions(+), 23 deletions(-)

diff --git a/docs/docs/modules/chains/foundational/transformation.ipynb b/docs/docs/modules/chains/foundational/transformation.ipynb
index 9114ab7359..f7caa2b9f9 100644
--- a/docs/docs/modules/chains/foundational/transformation.ipynb
+++ b/docs/docs/modules/chains/foundational/transformation.ipynb
@@ -7,21 +7,27 @@
    "source": [
     "# Transformation\n",
     "\n",
-    "This notebook showcases using a generic transformation chain.\n",
+    "Often we want to transform inputs as they are passed from one component to another.\n",
     "\n",
-    "As an example, we will create a dummy transformation that takes in a super long text, filters the text to only the first 3 paragraphs, and then passes that into an `LLMChain` to summarize those."
+    "As an example, we will create a dummy transformation that takes in a super long text, filters the text to only the first 3 paragraphs, and then passes that into a chain to summarize those."
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
-   "id": "bbbb4330",
+   "execution_count": 2,
+   "id": "d257f50d-c53d-41b7-be8a-df23fbd7c017",
    "metadata": {},
    "outputs": [],
    "source": [
-    "from langchain.chains import TransformChain, LLMChain, SimpleSequentialChain\n",
-    "from langchain.llms import OpenAI\n",
-    "from langchain.prompts import PromptTemplate"
+    "from langchain.prompts import PromptTemplate\n",
+    "\n",
+    "prompt = PromptTemplate.from_template(\n",
+    "    \"\"\"Summarize this text:\n",
+    "\n",
+    "{output_text}\n",
+    "\n",
+    "Summary:\"\"\"\n",
+    ")"
    ]
   },
   {
@@ -35,9 +41,67 @@
     "    state_of_the_union = f.read()"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "4c938536-e3fb-45eb-a1b3-cb82be410e32",
+   "metadata": {},
+   "source": [
+    "## Using LCEL\n",
+    "\n",
+    "With LCEL this is trivial, since we can add functions in any `RunnableSequence`."
+   ]
+  },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 17,
+   "id": "1e53e851-b1bd-424f-a144-5f2e8b413dcf",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "'The speaker acknowledges the presence of important figures in the government and addresses the audience as fellow Americans. They highlight the impact of COVID-19 on keeping people apart in the previous year but express joy in being able to come together again. The speaker emphasizes the unity of Democrats, Republicans, and Independents as Americans.'"
+      ]
+     },
+     "execution_count": 17,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from langchain.chat_models import ChatOpenAI\n",
+    "from langchain.schema import StrOutputParser\n",
+    "\n",
+    "runnable = {\"output_text\": lambda text: \"\\n\\n\".join(text.split(\"\\n\\n\")[:3])} | prompt | ChatOpenAI() | StrOutputParser()\n",
+    "runnable.invoke(state_of_the_union)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a9b9bd07-155f-4777-9215-509d39ecfe3f",
+   "metadata": {},
+   "source": [
+    "## [Legacy] TransformChain\n",
+    "\n",
+    "::note:: This is a legacy class; using LCEL as shown above is preferred.\n",
+    "\n",
+    "This example showcases using a generic transformation chain."
+ ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "bbbb4330", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.chains import TransformChain, LLMChain, SimpleSequentialChain\n", + "from langchain.llms import OpenAI\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, "id": "98739592", "metadata": {}, "outputs": [], @@ -47,7 +111,6 @@ " shortened_text = \"\\n\\n\".join(text.split(\"\\n\\n\")[:3])\n", " return {\"output_text\": shortened_text}\n", "\n", - "\n", "transform_chain = TransformChain(\n", " input_variables=[\"text\"], output_variables=[\"output_text\"], transform=transform_func\n", ")" @@ -55,7 +118,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 14, "id": "e9397934", "metadata": {}, "outputs": [], @@ -71,7 +134,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 15, "id": "06f51f17", "metadata": {}, "outputs": [], @@ -81,17 +144,17 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 16, "id": "f7caa1ee", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "' The speaker addresses the nation, noting that while last year they were kept apart due to COVID-19, this year they are together again. They are reminded that regardless of their political affiliations, they are all Americans.'" + "' In an address to the nation, the speaker acknowledges the hardships of the past year due to the COVID-19 pandemic, but emphasizes that regardless of political affiliation, all Americans can come together.'" ] }, - "execution_count": 7, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -99,14 +162,6 @@ "source": [ "sequential_chain.run(state_of_the_union)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e3ca6409", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { @@ -125,7 +180,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.3" + "version": "3.9.1" } }, "nbformat": 4,