fix #3117
Bagatur 2023-08-31 07:29:49 -07:00 committed by GitHub
commit 29270e0378
2 changed files with 31 additions and 44 deletions

langchain/chains/question_answering/refine_prompts.py

@@ -11,7 +11,7 @@ from langchain.prompts.prompt import PromptTemplate
 DEFAULT_REFINE_PROMPT_TMPL = (
     "The original question is as follows: {question}\n"
     "We have provided an existing answer: {existing_answer}\n"
-    "We have the opportunity to refine the existing answer"
+    "We have the opportunity to refine the existing answer "
     "(only if needed) with some more context below.\n"
     "------------\n"
     "{context_str}\n"
@@ -20,12 +20,10 @@ DEFAULT_REFINE_PROMPT_TMPL = (
     "answer the question. "
     "If the context isn't useful, return the original answer."
 )
-DEFAULT_REFINE_PROMPT = PromptTemplate(
-    input_variables=["question", "existing_answer", "context_str"],
-    template=DEFAULT_REFINE_PROMPT_TMPL,
-)
+DEFAULT_REFINE_PROMPT = PromptTemplate.from_template(DEFAULT_REFINE_PROMPT_TMPL)
+
 refine_template = (
-    "We have the opportunity to refine the existing answer"
+    "We have the opportunity to refine the existing answer "
     "(only if needed) with some more context below.\n"
     "------------\n"
     "{context_str}\n"
@@ -34,12 +32,9 @@ refine_template = (
     "answer the question. "
     "If the context isn't useful, return the original answer."
 )
-messages = [
-    HumanMessagePromptTemplate.from_template("{question}"),
-    AIMessagePromptTemplate.from_template("{existing_answer}"),
-    HumanMessagePromptTemplate.from_template(refine_template),
-]
-CHAT_REFINE_PROMPT = ChatPromptTemplate.from_messages(messages)
+CHAT_REFINE_PROMPT = ChatPromptTemplate.from_messages(
+    [("human", "{question}"), ("ai", "{existing_answer}"), ("human", "refine_template")]
+)
 REFINE_PROMPT_SELECTOR = ConditionalPromptSelector(
     default_prompt=DEFAULT_REFINE_PROMPT,
     conditionals=[(is_chat_model, CHAT_REFINE_PROMPT)],
@@ -48,28 +43,25 @@ REFINE_PROMPT_SELECTOR = ConditionalPromptSelector(
 DEFAULT_TEXT_QA_PROMPT_TMPL = (
     "Context information is below. \n"
-    "---------------------\n"
-    "{context_str}"
-    "\n---------------------\n"
+    "------------\n"
+    "{context_str}\n"
+    "------------\n"
     "Given the context information and not prior knowledge, "
     "answer the question: {question}\n"
 )
-DEFAULT_TEXT_QA_PROMPT = PromptTemplate(
-    input_variables=["context_str", "question"], template=DEFAULT_TEXT_QA_PROMPT_TMPL
-)
+DEFAULT_TEXT_QA_PROMPT = PromptTemplate.from_template(DEFAULT_TEXT_QA_PROMPT_TMPL)
+
 
 chat_qa_prompt_template = (
-    "Context information is below. \n"
-    "---------------------\n"
-    "{context_str}"
-    "\n---------------------\n"
+    "Context information is below.\n"
+    "------------\n"
+    "{context_str}\n"
+    "------------\n"
     "Given the context information and not prior knowledge, "
     "answer any questions"
 )
-messages = [
-    SystemMessagePromptTemplate.from_template(chat_qa_prompt_template),
-    HumanMessagePromptTemplate.from_template("{question}"),
-]
-CHAT_QUESTION_PROMPT = ChatPromptTemplate.from_messages(messages)
+CHAT_QUESTION_PROMPT = ChatPromptTemplate.from_messages(
+    [("system", chat_qa_prompt_template), ("human", "{question}")]
+)
 QUESTION_PROMPT_SELECTOR = ConditionalPromptSelector(
     default_prompt=DEFAULT_TEXT_QA_PROMPT,
     conditionals=[(is_chat_model, CHAT_QUESTION_PROMPT)],
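
For context on the API pattern used above (not part of the commit): PromptTemplate.from_template infers input_variables from the {placeholders} in the template string, so the explicit input_variables= argument it replaces was redundant, and ChatPromptTemplate.from_messages accepts (role, template) tuples as shorthand for the *MessagePromptTemplate classes. A minimal sketch against the langchain API of that era:

from langchain.prompts import PromptTemplate
from langchain.prompts.chat import ChatPromptTemplate

# from_template parses {question} out of the string; no input_variables needed.
prompt = PromptTemplate.from_template("The original question is as follows: {question}\n")
print(prompt.input_variables)  # ['question']

# (role, template) tuples are shorthand for Human/AI/SystemMessagePromptTemplate.
chat = ChatPromptTemplate.from_messages(
    [("human", "{question}"), ("ai", "{existing_answer}")]
)
msgs = chat.format_messages(question="What is X?", existing_answer="X is ...")
print([m.type for m in msgs])  # ['human', 'ai']

Note that the new CHAT_REFINE_PROMPT passes the quoted string "refine_template" rather than the refine_template variable, so that human message renders as the literal text refine_template instead of the refine instructions.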

langchain/chains/summarize/refine_prompts.py

@@ -1,21 +1,16 @@
 # flake8: noqa
 from langchain.prompts import PromptTemplate
 
-REFINE_PROMPT_TMPL = (
-    "Your job is to produce a final summary\n"
-    "We have provided an existing summary up to a certain point: {existing_answer}\n"
-    "We have the opportunity to refine the existing summary"
-    "(only if needed) with some more context below.\n"
-    "------------\n"
-    "{text}\n"
-    "------------\n"
-    "Given the new context, refine the original summary\n"
-    "If the context isn't useful, return the original summary."
-)
-REFINE_PROMPT = PromptTemplate(
-    input_variables=["existing_answer", "text"],
-    template=REFINE_PROMPT_TMPL,
-)
-
+REFINE_PROMPT_TMPL = """\
+Your job is to produce a final summary.
+We have provided an existing summary up to a certain point: {existing_answer}
+We have the opportunity to refine the existing summary (only if needed) with some more context below.
+------------
+{text}
+------------
+Given the new context, refine the original summary.
+If the context isn't useful, return the original summary.\
+"""  # noqa: E501
+REFINE_PROMPT = PromptTemplate.from_template(REFINE_PROMPT_TMPL)
 
 prompt_template = """Write a concise summary of the following:
@@ -25,4 +20,4 @@ prompt_template = """Write a concise summary of the following:
 
 
 CONCISE SUMMARY:"""
-PROMPT = PromptTemplate(template=prompt_template, input_variables=["text"])
+PROMPT = PromptTemplate.from_template(prompt_template)
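
A small usage sketch (not part of the commit), assuming these constants keep their usual import path under langchain.chains.summarize: the from_template versions behave exactly like the explicit constructors they replace.

from langchain.chains.summarize.refine_prompts import PROMPT, REFINE_PROMPT

# Variables are inferred from the template bodies.
print(sorted(REFINE_PROMPT.input_variables))  # ['existing_answer', 'text']
print(PROMPT.input_variables)                 # ['text']

# Rendering is unchanged; only construction differs.
rendered = PROMPT.format(text="LangChain is a framework for LLM apps.")
print(rendered.startswith("Write a concise summary"))  # True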