fix stuff count (#1083)

searx-api
Harrison Chase authored 1 year ago, committed by GitHub
parent 971458c5de
commit badeeb37b0

@@ -214,6 +214,78 @@
"result['answer']"
]
},
{
"cell_type": "markdown",
"id": "908c00e2",
"metadata": {},
"source": [
"## Chat Vector DB with `map_reduce`\n",
"We can also use different types of combine document chains with the Chat Vector DB chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "06d91167",
"metadata": {},
"outputs": [],
"source": [
"from langchain.chains import LLMChain\n",
"from langchain.chains.question_answering import load_qa_chain\n",
"from langchain.chains.chat_vector_db.prompts import CONDENSE_QUESTION_PROMPT"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "1711d3b4",
"metadata": {},
"outputs": [],
"source": [
"llm = OpenAI(temperature=0)\n",
"question_generator = LLMChain(llm=llm, prompt=CONDENSE_QUESTION_PROMPT)\n",
"doc_chain = load_qa_chain(llm, chain_type=\"map_reduce\")\n",
"\n",
"chain = ChatVectorDBChain(\n",
" vectorstore=vectorstore,\n",
" question_generator=question_generator,\n",
" combine_docs_chain=doc_chain,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "375b33ef",
"metadata": {},
"outputs": [],
"source": [
"chat_history = []\n",
"query = \"What did the president say about Ketanji Brown Jackson\"\n",
"result = chain({\"question\": query, \"chat_history\": chat_history})"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "ca48ff74",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"\" The president said that Ketanji Brown Jackson is one of the nation's top legal minds, a former top litigator in private practice, a former federal public defender, from a family of public school educators and police officers, a consensus builder, and has received a broad range of support from the Fraternal Order of Police to former judges appointed by Democrats and Republicans.\""
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"result['answer']"
]
},
{
"cell_type": "markdown",
"id": "2324cdc6-98bf-4708-b8cd-02a98b1e5b67",
@@ -293,14 +365,6 @@
"query = \"Did he mention who she suceeded\"\n",
"result = qa({\"question\": query, \"chat_history\": chat_history})"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a7ea93ff-1899-4171-9c24-85df20ae1a3d",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
@@ -319,7 +383,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
"version": "3.9.1"
}
},
"nbformat": 4,

@@ -68,7 +68,11 @@ class StuffDocumentsChain(BaseCombineDocumentsChain, BaseModel):
         # Format each document according to the prompt
         doc_strings = [self.document_prompt.format(**doc) for doc in doc_dicts]
         # Join the documents together to put them in the prompt.
-        inputs = kwargs.copy()
+        inputs = {
+            k: v
+            for k, v in kwargs.items()
+            if k in self.llm_chain.prompt.input_variables
+        }
         inputs[self.document_variable_name] = "\n\n".join(doc_strings)
         return inputs
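
The hunk above replaces `inputs = kwargs.copy()` with a dict comprehension that keeps only the keyword arguments the prompt actually declares, presumably so that extra chain inputs (such as `chat_history` from the chat chain) are no longer forwarded to a prompt that does not expect them. A minimal, self-contained sketch of that filtering behaviour, using hypothetical stand-ins (`FakePrompt`, `build_inputs`) rather than the real `StuffDocumentsChain`:

# Hypothetical stand-in for self.llm_chain.prompt; it only declares two variables.
class FakePrompt:
    input_variables = ["context", "question"]

def build_inputs(doc_strings, document_variable_name="context", **kwargs):
    # Keep only the kwargs the prompt declares (mirrors the fix above); before
    # the fix, kwargs.copy() would also forward e.g. chat_history to the prompt.
    inputs = {k: v for k, v in kwargs.items() if k in FakePrompt.input_variables}
    inputs[document_variable_name] = "\n\n".join(doc_strings)
    return inputs

print(build_inputs(
    ["doc one", "doc two"],
    question="What did the president say about Ketanji Brown Jackson",
    chat_history=[],  # extra chain input; now dropped instead of reaching the prompt
))
# -> {'question': '...', 'context': 'doc one\n\ndoc two'}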
