{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "5062941a",
   "metadata": {},
   "source": [
    "# Adding memory\n",
    "\n",
    "This notebook shows how to add memory to an arbitrary chain. Right now, you can use the memory classes, but you need to hook them up manually."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "7998efd8",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.chat_models import ChatOpenAI\n",
    "from langchain.memory import ConversationBufferMemory\n",
    "from langchain.schema.runnable import RunnableMap\n",
    "from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
    "\n",
    "model = ChatOpenAI()\n",
    "prompt = ChatPromptTemplate.from_messages([\n",
    "    (\"system\", \"You are a helpful chatbot\"),\n",
    "    MessagesPlaceholder(variable_name=\"history\"),\n",
    "    (\"human\", \"{input}\")\n",
    "])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "fa0087f3",
   "metadata": {},
   "outputs": [],
   "source": [
    "memory = ConversationBufferMemory(return_messages=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "06b531ae",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'history': []}"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "memory.load_memory_variables({})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "d9437af6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# First map: pass the raw input through and load the memory variables.\n",
    "chain = RunnableMap({\n",
    "    \"input\": lambda x: x[\"input\"],\n",
    "    \"memory\": memory.load_memory_variables\n",
    "}) | {\n",
    "    # Second map: pull the message history out of the loaded memory.\n",
    "    \"input\": lambda x: x[\"input\"],\n",
    "    \"history\": lambda x: x[\"memory\"][\"history\"]\n",
    "} | prompt | model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "bed1e260",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='Hello Bob! How can I assist you today?', additional_kwargs={}, example=False)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "inputs = {\"input\": \"hi im bob\"}\n",
    "response = chain.invoke(inputs)\n",
    "response"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "890475b4",
   "metadata": {},
   "outputs": [],
   "source": [
    "memory.save_context(inputs, {\"output\": response.content})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "e8fcb77f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'history': [HumanMessage(content='hi im bob', additional_kwargs={}, example=False),\n",
       " AIMessage(content='Hello Bob! How can I assist you today?', additional_kwargs={}, example=False)]}"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "memory.load_memory_variables({})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "d837d5c3",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='Your name is Bob.', additional_kwargs={}, example=False)"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "inputs = {\"input\": \"whats my name\"}\n",
    "response = chain.invoke(inputs)\n",
    "response"
   ]
  },
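  {
   "cell_type": "markdown",
   "id": "8f1b7c2a",
   "metadata": {},
   "source": [
    "The chain only *reads* from memory; writing is still manual. As a minimal sketch (reusing only the `save_context` and `load_memory_variables` calls shown above), you can persist this second exchange the same way so later turns can reference it:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3c9d41e0",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: save this exchange as well, then inspect the accumulated history.\n",
    "memory.save_context(inputs, {\"output\": response.content})\n",
    "memory.load_memory_variables({})"
   ]
  }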
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}