Mirror of https://github.com/hwchase17/langchain, synced 2024-11-06 03:20:49 +00:00
Change with_history option to False for ChatGLM by default (#8076)
The ChatGLM LLM integration accumulates conversation history by default (with_history=True) and sends it to the ChatGLM backend API, which is not what most users expect. This PR sets with_history=False by default; users must explicitly set llm.with_history = True to turn the feature on. Related PRs: #8048, #7774.

Co-authored-by: mlot <limpo2000@gmail.com>
Co-authored-by: Bagatur <baskaryan@gmail.com>
This commit is contained in:
parent 1f055775f8
commit 24f889f2bc
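What the new default means for callers, as a minimal sketch; the import path and the endpoint URL are assumptions based on the langchain layout at the time of this commit, not part of the diff:

    from langchain.llms import ChatGLM

    # ChatGLM served over a local HTTP endpoint; the URL is illustrative.
    llm = ChatGLM(endpoint_url="http://127.0.0.1:8000")

    # After this commit the integration is stateless by default:
    # each call sends only the current prompt to the backend.
    assert llm.with_history is False

    # Opt in explicitly to have the LLM object accumulate conversation
    # history and send the full context with every request.
    llm.with_history = True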
@@ -55,7 +55,11 @@
     "    history=[[\"我将从美国到中国来旅游,出行前希望了解中国的城市\", \"欢迎问我任何问题。\"]],\n",
     "    top_p=0.9,\n",
     "    model_kwargs={\"sample_model_args\": False},\n",
-    ")"
+    ")\n",
+    "\n",
+    "# turn on with_history only when you want the LLM object to keep track of the conversation history\n",
+    "# and send the accumulated context to the backend model api, which make it stateful. By default it is stateless.\n",
+    "# llm.with_history = True"
    ]
   },
   {
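The notebook lines above are JSON-escaped source strings. De-escaped, the cell patched by this hunk reads roughly as follows; the import, the opening of the ChatGLM(...) call, and the endpoint_url value are assumptions, since the hunk only shows the tail of the cell:

    from langchain.llms import ChatGLM  # assumed import, not visible in the hunk

    endpoint_url = "http://127.0.0.1:8000"  # assumed; defined earlier in the notebook

    llm = ChatGLM(
        endpoint_url=endpoint_url,
        # "I will travel from the US to China, and before the trip I would like
        # to learn about China's cities." / "Feel free to ask me any questions."
        history=[["我将从美国到中国来旅游,出行前希望了解中国的城市", "欢迎问我任何问题。"]],
        top_p=0.9,
        model_kwargs={"sample_model_args": False},
    )

    # turn on with_history only when you want the LLM object to keep track of
    # the conversation history and send the accumulated context to the backend
    # model api, which makes it stateful. By default it is stateless.
    # llm.with_history = True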
@@ -95,22 +99,6 @@
     "\n",
     "llm_chain.run(question)"
    ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "By Default, ChatGLM is statful to keep track of the conversation history and send the accumulated context to the model. To enable stateless mode, we could set ChatGLM.with_history as `False` explicitly."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "llm.with_history = False"
-   ]
-  }
  ],
  "metadata": {

@@ -37,7 +37,7 @@ class ChatGLM(LLM):
     """History of the conversation"""
     top_p: float = 0.7
     """Top P for nucleus sampling from 0 to 1"""
-    with_history: bool = True
+    with_history: bool = False
     """Whether to use history or not"""
 
     @property
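For readers of the one-line default change above, a hypothetical sketch of the request/response flow that the with_history flag gates. This is not the actual ChatGLM._call implementation, and the payload shape is an assumption modeled on the common ChatGLM demo API:

    import requests

    def call_chatglm(endpoint_url: str, prompt: str, history: list,
                     top_p: float = 0.7, with_history: bool = False) -> str:
        # Hypothetical sketch; the real ChatGLM._call may differ in details.
        payload = {"prompt": prompt, "history": history, "top_p": top_p}
        resp = requests.post(endpoint_url, json=payload)
        resp.raise_for_status()
        text = resp.json().get("response", "")
        if with_history:
            # Stateful mode: remember this exchange so the next request
            # carries the accumulated conversation context.
            history.append([prompt, text])
        return text

With the new default (with_history=False), history is left untouched and every call is independent.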