Harrison/parameter (#7081)

Add a parameter, rephrase_question, that controls whether the newly generated standalone question or the user's original question is passed to the combine_docs_chain.

---------

Co-authored-by: Dev 2049 <dev.dev2049@gmail.com>
Co-authored-by: Bagatur <baskaryan@gmail.com>
Harrison Chase 2023-07-05 20:51:25 -04:00 committed by GitHub
parent 930e319ca7
commit 695e7027e6
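For reference, here is a minimal usage sketch of the new flag (not part of this commit). The OpenAI-backed models, FAISS store, and sample data are illustrative assumptions on top of the langchain APIs of this era.

# Minimal usage sketch (illustrative, not from this commit).
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS

vectorstore = FAISS.from_texts(
    ["Conversational retrieval condenses follow-up questions before searching."],
    OpenAIEmbeddings(),
)

chain = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(temperature=0),
    retriever=vectorstore.as_retriever(),
    # New in this commit: use the condensed question for retrieval only and
    # send the user's original wording to the combine_docs_chain.
    rephrase_question=False,
)

result = chain(
    {
        "question": "Does it do that before searching?",
        "chat_history": [
            ("What does conversational retrieval do?", "It condenses follow-up questions.")
        ],
    }
)
print(result["answer"])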

@@ -63,6 +63,11 @@ class BaseConversationalRetrievalChain(Chain):
     a new standalone question to be used later on."""
     output_key: str = "answer"
     """The output key to return the final answer of this chain in."""
+    rephrase_question: bool = True
+    """Whether or not to pass the new generated question to the combine_docs_chain.
+    If True, will pass the new generated question along.
+    If False, will only use the new generated question for retrieval and pass the
+    original question along to the combine_docs_chain."""
     return_source_documents: bool = False
     """Return the retrieved source documents as part of the final result."""
     return_generated_question: bool = False
@@ -131,7 +136,8 @@ class BaseConversationalRetrievalChain(Chain):
         else:
             docs = self._get_docs(new_question, inputs)  # type: ignore[call-arg]
         new_inputs = inputs.copy()
-        new_inputs["question"] = new_question
+        if self.rephrase_question:
+            new_inputs["question"] = new_question
         new_inputs["chat_history"] = chat_history_str
         answer = self.combine_docs_chain.run(
             input_documents=docs, callbacks=_run_manager.get_child(), **new_inputs
@@ -178,7 +184,8 @@ class BaseConversationalRetrievalChain(Chain):
         else:
             docs = await self._aget_docs(new_question, inputs)  # type: ignore[call-arg]
         new_inputs = inputs.copy()
-        new_inputs["question"] = new_question
+        if self.rephrase_question:
+            new_inputs["question"] = new_question
         new_inputs["chat_history"] = chat_history_str
         answer = await self.combine_docs_chain.arun(
             input_documents=docs, callbacks=_run_manager.get_child(), **new_inputs
@@ -212,8 +219,9 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain):
     2. This new question is passed to the retriever and relevant documents are
     returned.

-    3. The retrieved documents are passed to an LLM along with the new question to
-    generate a final answer.
+    3. The retrieved documents are passed to an LLM along with either the new question
+    (default behavior) or the original question and chat history to generate a final
+    response.

     Example:
         .. code-block:: python
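The updated docstring above describes a three-step flow. As a rough sketch of what rephrase_question changes, the following is a simplified approximation of that flow, not the chain's actual _call implementation (callbacks, get_chat_history formatting, and output handling are omitted):

# Simplified, illustrative approximation of the three-step flow; not the
# library's real _call method.
def answer_with_retrieval(chain, question, chat_history):
    chat_history_str = "\n".join(f"Human: {q}\nAssistant: {a}" for q, a in chat_history)
    # 1. Condense the follow-up question into a standalone question.
    new_question = chain.question_generator.run(
        question=question, chat_history=chat_history_str
    )
    # 2. Retrieve documents using the standalone question.
    docs = chain.retriever.get_relevant_documents(new_question)
    # 3. Answer with the standalone question (default), or with the original
    #    question when rephrase_question=False.
    final_question = new_question if chain.rephrase_question else question
    return chain.combine_docs_chain.run(
        input_documents=docs, question=final_question, chat_history=chat_history_str
    )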