From 61938a02a1e76fa6c6e8203c98a9344a179c810d Mon Sep 17 00:00:00 2001
From: Hakan Tekgul
Date: Wed, 5 Jul 2023 12:55:47 -0700
Subject: [PATCH] Create arize_llm_observability.ipynb (#7000)

Adding documentation and notebook for Arize callback handler.

- @dev2049
- Agents / Tools / Toolkits: @vowelparrot
- Tracing / Callbacks: @agola11
---
 .../arize_llm_observability.ipynb | 1765 +++++++++++++++++
 1 file changed, 1765 insertions(+)
 create mode 100644 docs/extras/ecosystem/integrations/arize_llm_observability.ipynb

diff --git a/docs/extras/ecosystem/integrations/arize_llm_observability.ipynb b/docs/extras/ecosystem/integrations/arize_llm_observability.ipynb
new file mode 100644
index 0000000000..b0de04c9c8
--- /dev/null
+++ b/docs/extras/ecosystem/integrations/arize_llm_observability.ipynb
@@ -0,0 +1,1765 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "Awn1jcNa0lLZ"
+   },
+   "source": [
+    "# Arize\n",
+    "\n",
+    "**Let's get started on using LangChain with Arize!** ✨\n",
+    "\n",
+    "\n",
+    "\n",
+    "Arize is an LLM observability platform for real-time monitoring and analysis of your LLM applications.\n",
+    "\n",
+    "Use Arize and LangChain together to monitor the performance of your LLM agents, identify areas that need improvement, and make prompt-engineering and troubleshooting decisions about your LLM applications. Together, the two tools help data scientists and machine learning engineers keep their LLM applications running at peak efficiency, delivering better results and greater value for their organizations."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "_X9GuXoSXleA"
+   },
+   "source": [
+    "## Step 0: Install Dependencies, Import Libraries, Use GPU 📚\n",
+    "\n",
+    "Import LangChain, Arize, and the Arize callback handler that integrates the two tools.\n",
+    "\n",
+    "⚠️ Use a GPU to save time generating embeddings. Click on 'Runtime', select 'Change Runtime Type' and\n",
+    "select 'GPU'."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "id": "RHkd6LP03YAz"
+   },
+   "outputs": [],
+   "source": [
+    "!pip3 install -q langchain\n",
+    "!pip3 install -q arize\n",
+    "!pip3 install -q 'arize[AutoEmbeddings]'\n",
+    "!pip3 install -q openai"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {
+    "id": "_LI-9U6y3YA0"
+   },
+   "outputs": [],
+   "source": [
+    "from langchain.callbacks import StdOutCallbackHandler\n",
+    "from langchain.callbacks.manager import AsyncCallbackManager\n",
+    "from langchain.callbacks import ArizeCallbackHandler\n",
+    "from langchain.llms import OpenAI"
+   ]
+  },
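+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Embedding generation is much faster on a GPU. As an optional sanity check, you can confirm that a GPU is visible before moving on. This is a minimal sketch that assumes `torch` is available; the `arize[AutoEmbeddings]` extra installed above is expected to pull it in."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional: check for a GPU before generating embeddings.\n",
+    "# Assumes torch is installed (expected via the arize[AutoEmbeddings] extra above).\n",
+    "import torch\n",
+    "\n",
+    "print(f\"GPU available: {torch.cuda.is_available()}\")"
+   ]
+  },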
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "VMjE6vwOOKS-"
+   },
+   "source": [
+    "## Step 1: Import and Set Up Arize Client\n",
+    "\n",
+    "The first step is to set up our Arize client. After that, we will log the data from the LangChain-driven application into Arize.\n",
+    "\n",
+    "Retrieve your Arize `API_KEY` and `SPACE_KEY` from your Space Settings page, and paste them in the set-up section below.\n",
+    "\n",
+    "We will also set up some metadata and the `ArizeCallbackHandler` to use while logging.\n",
+    "\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "9cTB9c-TVrZd"
+   },
+   "source": [
+    ""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {
+    "id": "M-R3wb9T3YA1",
+    "colab": {
+     "base_uri": "https://localhost:8080/"
+    },
+    "outputId": "55af2350-eac5-458b-eab4-f114bb246315"
+   },
+   "outputs": [
+    {
+     "output_type": "stream",
+     "name": "stdout",
+     "text": [
+      "\u001b[33m arize.utils.logging | WARNING | No available GPU has been detected. The use of GPU acceleration is strongly recommended. You can check for GPU availability by running `torch.cuda.is_available()`\u001b[0m\n",
+      "\u001b[38;21m arize.utils.logging | INFO | Downloading pre-trained model 'distilbert-base-uncased'\u001b[0m\n",
+      "\u001b[38;21m arize.utils.logging | INFO | Downloading tokenizer for 'distilbert-base-uncased'\u001b[0m\n",
+      "✅ Arize client setup done! Now you can start using Arize!\n"
+     ]
+    }
+   ],
+   "source": [
+    "SPACE_KEY = \"YOUR_SPACE_KEY\"\n",
+    "API_KEY = \"YOUR_API_KEY\"\n",
+    "\n",
+    "if SPACE_KEY == \"YOUR_SPACE_KEY\" or API_KEY == \"YOUR_API_KEY\":\n",
+    "    raise ValueError(\"❌ CHANGE SPACE AND API KEYS\")\n",
+    "\n",
+    "# Define callback handler for Arize\n",
+    "arize_callback = ArizeCallbackHandler(\n",
+    "    model_id=\"llm-langchain-demo\",\n",
+    "    model_version=\"1.0\",\n",
+    "    SPACE_KEY=SPACE_KEY,\n",
+    "    API_KEY=API_KEY\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "id": "QRw4xb6G3YA1"
+   },
+   "outputs": [],
+   "source": [
+    "# Put your OpenAI API key here!\n",
+    "%env OPENAI_API_KEY=\"YOUR OPENAI API KEY\""
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "9jjWMK3wo_FX"
+   },
+   "source": [
+    "## Step 2: Define LLM with Arize Callback Handler\n",
+    "Attach the callback handler we defined above to the LLM through LangChain's callback manager."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {
+    "id": "6hmSJ7r83YA1"
+   },
+   "outputs": [],
+   "source": [
+    "manager = AsyncCallbackManager([StdOutCallbackHandler(), arize_callback])\n",
+    "llm = OpenAI(temperature=0, callback_manager=manager, verbose=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "id": "ec97f_LONfp3"
+   },
+   "source": [
+    "## Step 3: Test LLM Responses and Logging into Arize\n",
+    "Use some simple prompts to test that the LLM works properly and that each prompt-response pair is logged into Arize with embeddings."
+   ]
+  },
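+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here is a minimal sketch of what the next cell does; the prompt text is illustrative, and the exact prompts used below may differ. Because `llm` was created with the callback manager, each call is picked up by the Arize handler, which logs the prompt, the completion, and their embeddings:\n",
+    "\n",
+    "```python\n",
+    "# Routed through the callback manager, so the Arize handler records this call.\n",
+    "response = llm(\"Tell me an interesting fact about whales.\")\n",
+    "print(response)\n",
+    "```"
+   ]
+  },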
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "metadata": {
+    "id": "_aCKTExC3YA2",
+    "colab": {
+     "base_uri": "https://localhost:8080/",
+     "height": 251,
+     "referenced_widgets": [
+      "6b5186d3669e4ac0989273825ad6362f",
+      "e54a15216e4f423f938522be70772936",
+      "3dfc39b04e2f495e94e57159751daf9d",
+      "9cc604b742aa4945adb03f0e8c183027",
+      "9ffd8469c4d14990b9ef1dde2b5ead00",
+      "457fff99678246f8bc0daa6ce45e23bf",
+      "7d807df27aa74c4c87af806731780384",
+      "21c2bfe22b2b46408720f3a2bf4bda68",
+      "3a7435426c64406c8954bc6eaa24285c",
+      "b7d98718c1e84625b3b58f659db50afe",
+      "3bd695f9c6804e9e98de3b912de73514",
+      "83b27d1df047427988312d455ab94eae",
+      "46caaac62d894bc381ef82501634a004",
+      "db4c5f8bf4d94f55b96cc569635e4f02",
+      "3983759590c642d7a240b5a94e1beb75",
+      "ce6451e4fd404df98ebcb2cb63cd6829",
+      "8c915a0342fc4e848b21181ca8854dee",
+      "67b5899a717e41fdbc429e4e4086edcd",
+      "90fe11f997324fa0a6c35ce03a49a09a",
+      "32a0ba0e3c844aff90db2228b466d54f",
+      "329e3f99d46749799ae1f45f1909c7ea",
+      "0234e169ca904302b550955aecac6575",
+      "d6183532390a41c7a10e5282ec38e2af",
+      "921c3c80e8904a7c963004994df409bc",
+      "9a96026a2238472c8a0d5ef2d9609b98",
+      "162931fddc00484e8e127601d5bf47bb",
+      "f5fd0cc6a6564f46b5b505d1bb52fd80",
+      "c1c6c7083e2244d7a8aa1fec4aecb429",
+      "69f335f441a6446cb6953fb0219299ea",
+      "6b8eba7462fa4e09a885e6eaf068ec08",
+      "1f4e8e07e2714fd4b48d184210eff980",
+      "e78e1cda5fea41a4b4b9f4a525f580f9",
+      "16d419ad3073439c9365fd309eb40dcc",
+      "a8f34232d7d74646be037fa72d875b29",
+      "ee8f1d823ac94f8591c057a13468abe5",
+      "6de09554d13c4c0b84849aaeffc1c78b",
+      "9ae648c2960f4228bfbcd3a761e7da1c",
+      "7919e309a10d47b4892810ed49d1372d",
+      "2a2c9a936ccd473281b1ddd216d3c4ad",
+      "043c157877ce40e5bc97555d4ad01b57",
+      "45e2b6c74098404f992f994dd5c823b4",
+      "7f949681cd0c4918888f7d3682b51f1c",
+      "8e83f10ee6e3434ab1fecfb2e8feba17",
+      "56063fc4fa7146d495edf6be6e1a6f5d"
+     ]
+    },
+    "outputId": "a17ac83d-65d4-49e9-f933-6374c2073166"
+   },
+   "outputs": [
+    {
+     "output_type": "stream",
+     "name": "stdout",
+     "text": [
+      "\u001b[38;21m arize.utils.logging | INFO | Generating embedding vectors\u001b[0m\n"
+     ]
+    },
+    {
+     "output_type": "display_data",
+     "data": {
+      "text/plain": [
+       "Map: 0%| | 0/1 [00:00