docs: update import paths for callbacks to use langchain_community callbacks where applicable (#18691)

Refactored imports from `langchain` to `langchain_community` wherever
applicable
wfh/log_error
Leonid Ganeline 4 months ago committed by GitHub
parent 2619420df1
commit 81cbf0f2fd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -177,7 +177,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.callbacks import ArgillaCallbackHandler\n",
"from langchain_community.callbacks.argilla_callback import ArgillaCallbackHandler\n",
"\n",
"argilla_callback = ArgillaCallbackHandler(\n",
" dataset_name=\"langchain-dataset\",\n",
@ -213,7 +213,7 @@
}
],
"source": [
"from langchain.callbacks import ArgillaCallbackHandler, StdOutCallbackHandler\n",
"from langchain_core.callbacks.stdout import StdOutCallbackHandler\n",
"from langchain_openai import OpenAI\n",
"\n",
"argilla_callback = ArgillaCallbackHandler(\n",
@ -277,9 +277,9 @@
}
],
"source": [
"from langchain.callbacks import ArgillaCallbackHandler, StdOutCallbackHandler\n",
"from langchain.chains import LLMChain\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain_core.callbacks.stdout import StdOutCallbackHandler\n",
"from langchain_openai import OpenAI\n",
"\n",
"argilla_callback = ArgillaCallbackHandler(\n",
@ -361,7 +361,7 @@
],
"source": [
"from langchain.agents import AgentType, initialize_agent, load_tools\n",
"from langchain.callbacks import ArgillaCallbackHandler, StdOutCallbackHandler\n",
"from langchain_core.callbacks.stdout import StdOutCallbackHandler\n",
"from langchain_openai import OpenAI\n",
"\n",
"argilla_callback = ArgillaCallbackHandler(\n",

@ -97,7 +97,7 @@
"if \"LANGCHAIN_COMET_TRACING\" in os.environ:\n",
" del os.environ[\"LANGCHAIN_COMET_TRACING\"]\n",
"\n",
"from langchain.callbacks.tracers.comet import CometTracer\n",
"from langchain_community.callbacks.tracers.comet import CometTracer\n",
"\n",
"tracer = CometTracer()\n",
"\n",
@ -130,7 +130,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.1"
"version": "3.10.12"
}
},
"nbformat": 4,

@ -118,7 +118,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.callbacks.confident_callback import DeepEvalCallbackHandler\n",
"from langchain_community.callbacks.confident_callback import DeepEvalCallbackHandler\n",
"\n",
"deepeval_callback = DeepEvalCallbackHandler(\n",
" implementation_name=\"langchainQuickstart\", metrics=[answer_relevancy_metric]\n",
@ -296,7 +296,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
"version": "3.10.12"
},
"vscode": {
"interpreter": {

@ -65,6 +65,23 @@
"Ensure you have installed the `context-python` package before using the handler."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"execution": {
"iopub.execute_input": "2024-03-06T19:05:26.534124Z",
"iopub.status.busy": "2024-03-06T19:05:26.533924Z",
"iopub.status.idle": "2024-03-06T19:05:26.798727Z",
"shell.execute_reply": "2024-03-06T19:05:26.798135Z",
"shell.execute_reply.started": "2024-03-06T19:05:26.534109Z"
}
},
"outputs": [],
"source": [
"from langchain_community.callbacks.context_callback import ContextCallbackHandler"
]
},
{
"cell_type": "code",
"execution_count": 3,
@ -73,8 +90,6 @@
"source": [
"import os\n",
"\n",
"from langchain.callbacks import ContextCallbackHandler\n",
"\n",
"token = os.environ[\"CONTEXT_API_TOKEN\"]\n",
"\n",
"context_callback = ContextCallbackHandler(token)"
@ -99,7 +114,6 @@
"source": [
"import os\n",
"\n",
"from langchain.callbacks import ContextCallbackHandler\n",
"from langchain.schema import (\n",
" HumanMessage,\n",
" SystemMessage,\n",
@ -155,7 +169,6 @@
"source": [
"import os\n",
"\n",
"from langchain.callbacks import ContextCallbackHandler\n",
"from langchain.chains import LLMChain\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain.prompts.chat import (\n",

@ -39,6 +39,16 @@
"%pip install --upgrade --quiet tiktoken"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3c9d9424-0879-4f14-91e5-1292e22820d7",
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.callbacks.infino_callback import InfinoCallbackHandler"
]
},
{
"cell_type": "code",
"execution_count": 2,
@ -53,7 +63,6 @@
"import matplotlib.dates as md\n",
"import matplotlib.pyplot as plt\n",
"from infinopy import InfinoClient\n",
"from langchain.callbacks import InfinoCallbackHandler\n",
"from langchain_openai import OpenAI"
]
},

@ -160,6 +160,25 @@
"You can collect input LLM prompts and output responses in a LabelStudio project, connecting it via `LabelStudioCallbackHandler`:"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"execution": {
"iopub.execute_input": "2024-03-06T19:07:34.462103Z",
"iopub.status.busy": "2024-03-06T19:07:34.461651Z",
"iopub.status.idle": "2024-03-06T19:07:34.661936Z",
"shell.execute_reply": "2024-03-06T19:07:34.661284Z",
"shell.execute_reply.started": "2024-03-06T19:07:34.462067Z"
}
},
"outputs": [],
"source": [
"from langchain_community.callbacks.labelstudio_callback import (\n",
" LabelStudioCallbackHandler,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
@ -170,7 +189,6 @@
},
"outputs": [],
"source": [
"from langchain.callbacks import LabelStudioCallbackHandler\n",
"from langchain_openai import OpenAI\n",
"\n",
"llm = OpenAI(\n",
@ -241,7 +259,6 @@
},
"outputs": [],
"source": [
"from langchain.callbacks import LabelStudioCallbackHandler\n",
"from langchain_core.messages import HumanMessage, SystemMessage\n",
"from langchain_openai import ChatOpenAI\n",
"\n",

@ -19,7 +19,7 @@ export LLMONITOR_APP_ID="..."
If you'd prefer not to set an environment variable, you can pass the key directly when initializing the callback handler:
```python
from langchain.callbacks import LLMonitorCallbackHandler
from langchain_community.callbacks.llmonitor_callback import LLMonitorCallbackHandler
handler = LLMonitorCallbackHandler(app_id="...")
```
@ -29,7 +29,6 @@ handler = LLMonitorCallbackHandler(app_id="...")
```python
from langchain_openai import OpenAI
from langchain_openai import ChatOpenAI
from langchain.callbacks import LLMonitorCallbackHandler
handler = LLMonitorCallbackHandler()
@ -53,9 +52,9 @@ Example:
```python
from langchain_openai import ChatOpenAI
from langchain_community.callbacks.llmonitor_callback import LLMonitorCallbackHandler
from langchain_core.messages import SystemMessage, HumanMessage
from langchain.agents import OpenAIFunctionsAgent, AgentExecutor, tool
from langchain.callbacks import LLMonitorCallbackHandler
llm = ChatOpenAI(temperature=0)
@ -86,7 +85,8 @@ Another example:
```python
from langchain.agents import load_tools, initialize_agent, AgentType
from langchain_openai import OpenAI
from langchain.callbacks import LLMonitorCallbackHandler
from langchain_community.callbacks.llmonitor_callback import LLMonitorCallbackHandler
handler = LLMonitorCallbackHandler()
@ -104,7 +104,7 @@ agent.run(
User tracking allows you to identify your users, track their cost, conversations and more.
```python
from langchain.callbacks.llmonitor_callback import LLMonitorCallbackHandler, identify
from langchain_community.callbacks.llmonitor_callback import LLMonitorCallbackHandler, identify
with identify("user-123"):
llm("Tell me a joke")

@ -68,14 +68,32 @@
"In this simple example we use `PromptLayerCallbackHandler` with `ChatOpenAI`. We add a PromptLayer tag named `chatopenai`"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"execution": {
"iopub.execute_input": "2024-03-06T19:10:56.673622Z",
"iopub.status.busy": "2024-03-06T19:10:56.673421Z",
"iopub.status.idle": "2024-03-06T19:10:56.887519Z",
"shell.execute_reply": "2024-03-06T19:10:56.886895Z",
"shell.execute_reply.started": "2024-03-06T19:10:56.673608Z"
}
},
"outputs": [],
"source": [
"import promptlayer # Don't forget this 🍰\n",
"from langchain_community.callbacks.promptlayer_callback import (\n",
" PromptLayerCallbackHandler,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import promptlayer # Don't forget this 🍰\n",
"from langchain.callbacks import PromptLayerCallbackHandler\n",
"from langchain.schema import (\n",
" HumanMessage,\n",
")\n",
@ -108,8 +126,6 @@
"metadata": {},
"outputs": [],
"source": [
"import promptlayer # Don't forget this 🍰\n",
"from langchain.callbacks import PromptLayerCallbackHandler\n",
"from langchain_community.llms import GPT4All\n",
"\n",
"model = GPT4All(model=\"./models/gpt4all-model.bin\", n_ctx=512, n_threads=8)\n",
@ -140,8 +156,6 @@
"metadata": {},
"outputs": [],
"source": [
"import promptlayer # Don't forget this 🍰\n",
"from langchain.callbacks import PromptLayerCallbackHandler\n",
"from langchain_openai import OpenAI\n",
"\n",
"\n",

@ -70,6 +70,16 @@
"os.environ[\"SERPAPI_API_KEY\"] = \"<ADD-KEY-HERE>\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e79dc1c0-b9dc-4652-9059-f3a8aa97b74a",
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.callbacks.sagemaker_callback import SageMakerCallbackHandler"
]
},
{
"cell_type": "code",
"execution_count": null,
@ -80,7 +90,6 @@
"outputs": [],
"source": [
"from langchain.agents import initialize_agent, load_tools\n",
"from langchain.callbacks import SageMakerCallbackHandler\n",
"from langchain.chains import LLMChain, SimpleSequentialChain\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain_openai import OpenAI\n",

@ -28,7 +28,9 @@ You can run `streamlit hello` to load a sample app and validate your install suc
To create a `StreamlitCallbackHandler`, you just need to provide a parent container to render the output.
```python
from langchain_community.callbacks import StreamlitCallbackHandler
from langchain_community.callbacks.streamlit import (
StreamlitCallbackHandler,
)
import streamlit as st
st_callback = StreamlitCallbackHandler(st.container())
@ -47,7 +49,6 @@ thoughts and actions live in your app.
import streamlit as st
from langchain import hub
from langchain.agents import AgentExecutor, create_react_agent, load_tools
from langchain_community.callbacks import StreamlitCallbackHandler
from langchain_openai import OpenAI
llm = OpenAI(temperature=0, streaming=True)

@ -65,6 +65,16 @@
"os.environ[\"TRUBRICS_PASSWORD\"] = \"***\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "36fa67da-8a05-4d54-b0a3-dc173f3107a0",
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.callbacks.trubrics_callback import TrubricsCallbackHandler"
]
},
{
"cell_type": "markdown",
"id": "cd7177b0-a9e8-45ae-adb0-ea779376511b",
@ -148,7 +158,6 @@
},
"outputs": [],
"source": [
"from langchain.callbacks import TrubricsCallbackHandler\n",
"from langchain_openai import OpenAI"
]
},
@ -266,7 +275,6 @@
},
"outputs": [],
"source": [
"from langchain.callbacks import TrubricsCallbackHandler\n",
"from langchain_core.messages import HumanMessage, SystemMessage\n",
"from langchain_openai import ChatOpenAI"
]

Loading…
Cancel
Save