From f337f3ed36b59cd841801fe9f7158440dc608405 Mon Sep 17 00:00:00 2001
From: ccurme
Date: Wed, 24 Jul 2024 10:45:00 -0400
Subject: [PATCH] docs: update chain migration guide (#24501)

- Update `ConversationChain` example to show use without session IDs;
- Fix a minor bug (specify history_messages_key).
---
 docs/docs/how_to/migrate_chains.ipynb | 41 +++++++++++++++++++++++++++++++++--------------
 1 file changed, 27 insertions(+), 14 deletions(-)

diff --git a/docs/docs/how_to/migrate_chains.ipynb b/docs/docs/how_to/migrate_chains.ipynb
index 8c879871ac..f12464f9b7 100644
--- a/docs/docs/how_to/migrate_chains.ipynb
+++ b/docs/docs/how_to/migrate_chains.ipynb
@@ -284,17 +284,17 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 1,
    "id": "173e1a9c-2a18-4669-b0de-136f39197786",
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "\"Arr, matey! I be sailin' the high seas with me crew, searchin' for buried treasure and adventure! How be ye doin' on this fine day?\""
+       "\"Arrr, I be doin' well, me heartie! Just sailin' the high seas in search of treasure and adventure. How be ye?\""
       ]
      },
-     "execution_count": 8,
+     "execution_count": 1,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -316,14 +316,20 @@
     "\n",
     "history = InMemoryChatMessageHistory()\n",
     "\n",
+    "\n",
+    "def get_history():\n",
+    "    return history\n",
+    "\n",
+    "\n",
     "chain = prompt | ChatOpenAI() | StrOutputParser()\n",
     "\n",
-    "wrapped_chain = RunnableWithMessageHistory(chain, lambda x: history)\n",
+    "wrapped_chain = RunnableWithMessageHistory(\n",
+    "    chain,\n",
+    "    get_history,\n",
+    "    history_messages_key=\"chat_history\",\n",
+    ")\n",
     "\n",
-    "wrapped_chain.invoke(\n",
-    "    {\"input\": \"how are you?\"},\n",
-    "    config={\"configurable\": {\"session_id\": \"42\"}},\n",
-    ")"
+    "wrapped_chain.invoke({\"input\": \"how are you?\"})"
    ]
   },
   {
@@ -340,17 +346,17 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 2,
    "id": "4e05994f-1fbc-4699-bf2e-62cb0e4deeb8",
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/plain": [
-       "AIMessage(content=\"Ahoy there! What be ye wantin' from this old pirate?\", response_metadata={'token_usage': {'completion_tokens': 15, 'prompt_tokens': 29, 'total_tokens': 44}, 'model_name': 'gpt-3.5-turbo-0125', 'system_fingerprint': None, 'finish_reason': 'stop', 'logprobs': None}, id='run-1846d5f5-0dda-43b6-bb49-864e541f9c29-0', usage_metadata={'input_tokens': 29, 'output_tokens': 15, 'total_tokens': 44})"
+       "'Ahoy matey! What can this old pirate do for ye today?'"
       ]
      },
-     "execution_count": 7,
+     "execution_count": 2,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -370,9 +376,16 @@
     "\n",
     "chain = prompt | ChatOpenAI() | StrOutputParser()\n",
     "\n",
-    "wrapped_chain = RunnableWithMessageHistory(chain, get_session_history)\n",
+    "wrapped_chain = RunnableWithMessageHistory(\n",
+    "    chain,\n",
+    "    get_session_history,\n",
+    "    history_messages_key=\"chat_history\",\n",
+    ")\n",
     "\n",
-    "wrapped_chain.invoke(\"Hello!\", config={\"configurable\": {\"session_id\": \"abc123\"}})"
+    "wrapped_chain.invoke(\n",
+    "    {\"input\": \"Hello!\"},\n",
+    "    config={\"configurable\": {\"session_id\": \"abc123\"}},\n",
+    ")"
    ]
   },
   {
@@ -790,7 +803,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.5"
+   "version": "3.10.4"
   }
  },
  "nbformat": 4,
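
For reference, the first updated cell assembles into roughly the following standalone sketch. The imports and the prompt definition fall outside the diff context, so they are assumptions here (reconstructed to match the `input` and `chat_history` keys the hunk references); only the `get_history` factory, the `RunnableWithMessageHistory` wrapping with `history_messages_key`, and the config-free `invoke` come from the added lines.

```python
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

# Assumed prompt: the diff only shows that `prompt` feeds an {input} variable
# and that history is injected under the "chat_history" key.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a pirate. Answer the following questions as best you can."),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{input}"),
    ]
)

# A single shared history object, as in the updated cell.
history = InMemoryChatMessageHistory()


def get_history():
    return history


chain = prompt | ChatOpenAI() | StrOutputParser()

wrapped_chain = RunnableWithMessageHistory(
    chain,
    get_history,
    history_messages_key="chat_history",
)

# Because get_history takes no arguments, no session_id config is required.
wrapped_chain.invoke({"input": "how are you?"})
```

This is the point of the first change: a zero-argument history factory lets callers skip the `config={"configurable": {"session_id": ...}}` boilerplate entirely.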
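The second updated cell keeps the per-session pattern. A self-contained sketch follows, assuming a simple in-memory `store` dict and the same prompt; neither `get_session_history`'s body nor the prompt appears in the hunk, so both are reconstructions, while the keyword-argument wrapping and the dict-shaped `invoke` input come from the added lines.

```python
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a pirate. Answer the following questions as best you can."),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{input}"),
    ]
)

# Hypothetical session store: one chat history per session_id.
store = {}


def get_session_history(session_id: str) -> InMemoryChatMessageHistory:
    if session_id not in store:
        store[session_id] = InMemoryChatMessageHistory()
    return store[session_id]


chain = prompt | ChatOpenAI() | StrOutputParser()

wrapped_chain = RunnableWithMessageHistory(
    chain,
    get_session_history,
    history_messages_key="chat_history",
)

# Here the factory takes a session_id, so each call must name its session.
wrapped_chain.invoke(
    {"input": "Hello!"},
    config={"configurable": {"session_id": "abc123"}},
)
```

In both sketches, passing `history_messages_key="chat_history"` is the minor bug fix the patch describes: without it, the wrapper would not know which prompt variable receives the stored messages.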