# Mirror of https://github.com/hwchase17/langchain
# Synced 2024-11-06 03:20:49 +00:00
from langchain.prompts import ChatPromptTemplate, FewShotChatMessagePromptTemplate
|
||
from langchain_community.chat_models import ChatOpenAI
|
||
from langchain_community.utilities import DuckDuckGoSearchAPIWrapper
|
||
from langchain_core.output_parsers import StrOutputParser
|
||
from langchain_core.runnables import RunnableLambda
|
||
|
||
# Shared DuckDuckGo search client, capped at 4 results per query;
# `retriever` below delegates to it.
search = DuckDuckGoSearchAPIWrapper(max_results=4)
def retriever(query):
    """Look up *query* with the DuckDuckGo wrapper and return the raw result text."""
    results = search.run(query)
    return results
# Few-shot examples: each pairs an original question ("input") with its more
# generic "step-back" paraphrase ("output").
_example_pairs = [
    (
        "Could the members of The Police perform lawful arrests?",
        "what can the members of The Police do?",
    ),
    (
        "Jan Sindel’s was born in what country?",
        "what is Jan Sindel’s personal history?",
    ),
]
examples = [{"input": question, "output": step_back} for question, step_back in _example_pairs]
# Template that renders one example as a human question followed by the AI's
# step-back paraphrase.
_example_messages = [
    ("human", "{input}"),
    ("ai", "{output}"),
]
example_prompt = ChatPromptTemplate.from_messages(_example_messages)
# Expand every entry of `examples` through `example_prompt`, producing the
# few-shot message history inserted into the main prompt.
few_shot_prompt = FewShotChatMessagePromptTemplate(
    examples=examples,
    example_prompt=example_prompt,
)
# System instructions for the step-back question generator.
_system_message = (
    "You are an expert at world knowledge. Your task is to step back "
    "and paraphrase a question to a more generic step-back question, which "
    "is easier to answer. Here are a few examples:"
)

# Full step-back prompt: system instructions, the few-shot examples, then the
# new user question.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", _system_message),
        few_shot_prompt,
        ("user", "{question}"),
    ]
)
# Chain that rewrites the incoming question into its step-back form,
# returning the paraphrase as a plain string.
question_gen = prompt | ChatOpenAI(temperature=0) | StrOutputParser()
# Answer-synthesis prompt: supplies both retrieved contexts (normal and
# step-back) alongside the original question.
response_prompt_template = """You are an expert of world knowledge. I am going to ask you a question. Your response should be comprehensive and not contradicted with the following context if they are relevant. Otherwise, ignore them if they are not relevant.

{normal_context}
{step_back_context}

Original Question: {question}
Answer:"""  # noqa: E501
# Compile the template above into a chat prompt expecting `normal_context`,
# `step_back_context`, and `question`.
response_prompt = ChatPromptTemplate.from_template(response_prompt_template)
# Inputs for the answer prompt: context retrieved with the question as-is,
# context retrieved with the generated step-back question, and the original
# question passed through unchanged.
_context = {
    # Retrieve context using the normal question.
    "normal_context": RunnableLambda(lambda x: x["question"]) | retriever,
    # Retrieve context using the step-back question.
    "step_back_context": question_gen | retriever,
    # Pass on the question untouched.
    "question": lambda x: x["question"],
}

# Final chain: gather both contexts in parallel, fill the response prompt,
# and answer with the chat model.
chain = _context | response_prompt | ChatOpenAI(temperature=0) | StrOutputParser()