From cda43c5a11504632b6ab40cf4e2bf82a469750a4 Mon Sep 17 00:00:00 2001
From: Tomaz Bratanic
Date: Tue, 12 Mar 2024 04:11:52 +0100
Subject: [PATCH] experimental[patch]: Fix LLM graph transformer default
 prompt (#18856)

Some LLMs do not allow multiple user messages in sequence.
---
 .../langchain_experimental/graph_transformers/llm.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/libs/experimental/langchain_experimental/graph_transformers/llm.py b/libs/experimental/langchain_experimental/graph_transformers/llm.py
index d5d2fbc98e..0d0888f84b 100644
--- a/libs/experimental/langchain_experimental/graph_transformers/llm.py
+++ b/libs/experimental/langchain_experimental/graph_transformers/llm.py
@@ -52,14 +52,12 @@ default_prompt = ChatPromptTemplate.from_messages(
         (
             "human",
             (
+                "Tip: Make sure to answer in the correct format and do "
+                "not include any explanations. "
                 "Use the given format to extract information from the "
                 "following input: {input}"
             ),
         ),
-        (
-            "human",
-            "Tip: Make sure to answer in the correct format and do not include any ",
-        ),
     ]
 )
 
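
Note for reviewers: a minimal sketch of what the patch changes in practice. It rebuilds only the patched "human" entry (the system message in llm.py is omitted here, and the sample input is made up), showing that the tip and the extraction instruction now travel in one user message, so providers that reject consecutive user messages accept the prompt.

    from langchain_core.prompts import ChatPromptTemplate

    # Reconstruct just the patched "human" entry of the default prompt.
    # The adjacent string literals are concatenated by Python into a
    # single message template.
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "human",
                (
                    "Tip: Make sure to answer in the correct format and do "
                    "not include any explanations. "
                    "Use the given format to extract information from the "
                    "following input: {input}"
                ),
            ),
        ]
    )

    # Formatting now yields a single HumanMessage where the old prompt
    # produced two user messages in a row.
    messages = prompt.format_messages(input="Marie Curie won two Nobel Prizes.")
    assert len(messages) == 1

The tip is prepended to the instruction rather than appended as a separate trailing message, which keeps the formatting guidance while satisfying the stricter chat APIs.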