From 24f889f2bc15f1595ee00ccf8ff82a2f5b4e7042 Mon Sep 17 00:00:00 2001
From: Liu Ming <83799887@qq.com>
Date: Tue, 25 Jul 2023 06:46:02 +0800
Subject: [PATCH] Change with_history option to False for ChatGLM by default
 (#8076)

The ChatGLM LLM integration accumulates conversation history by default
(with_history=True) and sends it to the ChatGLM backend API, which is not
expected in most cases. This PR sets with_history=False by default; users
should explicitly set llm.with_history=True to turn this feature on.

Related PRs: #8048, #7774

---------

Co-authored-by: mlot
Co-authored-by: Bagatur
---
 docs/extras/integrations/llms/chatglm.ipynb | 22 +++++-----------------
 libs/langchain/langchain/llms/chatglm.py    |  2 +-
 2 files changed, 6 insertions(+), 18 deletions(-)

diff --git a/docs/extras/integrations/llms/chatglm.ipynb b/docs/extras/integrations/llms/chatglm.ipynb
index b9db839b9e..0601925a5f 100644
--- a/docs/extras/integrations/llms/chatglm.ipynb
+++ b/docs/extras/integrations/llms/chatglm.ipynb
@@ -55,7 +55,11 @@
    "    history=[[\"我将从美国到中国来旅游,出行前希望了解中国的城市\", \"欢迎问我任何问题。\"]],\n",
    "    top_p=0.9,\n",
    "    model_kwargs={\"sample_model_args\": False},\n",
-   ")"
+   ")\n",
+   "\n",
+   "# Turn on with_history only when you want the LLM object to keep track of the conversation history\n",
+   "# and send the accumulated context to the backend model API, which makes it stateful. By default it is stateless.\n",
+   "# llm.with_history = True"
   ]
  },
  {
@@ -95,22 +99,6 @@
    "\n",
    "llm_chain.run(question)"
   ]
- },
- {
-  "cell_type": "markdown",
-  "metadata": {},
-  "source": [
-   "By Default, ChatGLM is statful to keep track of the conversation history and send the accumulated context to the model. To enable stateless mode, we could set ChatGLM.with_history as `False` explicitly."
-  ]
- },
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "metadata": {},
-  "outputs": [],
-  "source": [
-   "llm.with_history = False"
-  ]
 }
],
"metadata": {
diff --git a/libs/langchain/langchain/llms/chatglm.py b/libs/langchain/langchain/llms/chatglm.py
index 072e1bc50d..232f2f9af7 100644
--- a/libs/langchain/langchain/llms/chatglm.py
+++ b/libs/langchain/langchain/llms/chatglm.py
@@ -37,7 +37,7 @@ class ChatGLM(LLM):
     """History of the conversation"""
     top_p: float = 0.7
     """Top P for nucleus sampling from 0 to 1"""
-    with_history: bool = True
+    with_history: bool = False
     """Whether to use history or not"""
 
     @property
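
For reference, a minimal sketch of usage under the new default. The endpoint URL is a placeholder for your own ChatGLM server, and the construction mirrors the notebook above:

    from langchain.llms import ChatGLM

    # Placeholder endpoint; point this at your own ChatGLM API server.
    llm = ChatGLM(endpoint_url="http://127.0.0.1:8000")

    # After this change the integration is stateless by default
    # (with_history=False): each call sends only the current prompt
    # plus any preset `history`.
    llm("What cities should I visit in China?")

    # Opt back into the old stateful behavior explicitly; the LLM object
    # will then accumulate the conversation and resend it on every call.
    llm.with_history = True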