forked from Archives/langchain

Bugfix: Not all combine docs chains take a `prompt` kwarg (#3462)

Generalize ConversationalRetrievalChain.from_llm kwargs

Co-authored-by: shubham.suneja <shubham.suneja>

parent 3033c6b964
commit 2cbd41145c
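The practical effect for callers: the dedicated `qa_prompt` parameter is replaced by a generic `combine_docs_chain_kwargs` dict that is splatted into `load_qa_chain`, so prompt customization is no longer limited to chain types that accept a `prompt` kwarg. A hedged before/after sketch of the call site (the `llm`, `retriever`, and `QA_PROMPT` names are illustrative, not part of this commit):

from langchain.chains import ConversationalRetrievalChain

# Before: the QA prompt could only be set via qa_prompt, which from_llm always
# forwarded as `prompt=` to load_qa_chain.
chain = ConversationalRetrievalChain.from_llm(llm, retriever, qa_prompt=QA_PROMPT)

# After: any kwargs for the combine-docs chain are passed as a dict and
# forwarded verbatim to load_qa_chain.
chain = ConversationalRetrievalChain.from_llm(
    llm,
    retriever,
    combine_docs_chain_kwargs={"prompt": QA_PROMPT},
)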
@@ -172,15 +172,16 @@ class ConversationalRetrievalChain(BaseConversationalRetrievalChain):
         llm: BaseLanguageModel,
         retriever: BaseRetriever,
         condense_question_prompt: BasePromptTemplate = CONDENSE_QUESTION_PROMPT,
-        qa_prompt: Optional[BasePromptTemplate] = None,
         chain_type: str = "stuff",
+        combine_docs_chain_kwargs: Optional[Dict] = None,
         **kwargs: Any,
     ) -> BaseConversationalRetrievalChain:
         """Load chain from LLM."""
+        combine_docs_chain_kwargs = combine_docs_chain_kwargs or {}
         doc_chain = load_qa_chain(
             llm,
             chain_type=chain_type,
-            prompt=qa_prompt,
+            **combine_docs_chain_kwargs,
         )
         condense_question_chain = LLMChain(llm=llm, prompt=condense_question_prompt)
         return cls(
@@ -226,15 +227,16 @@ class ChatVectorDBChain(BaseConversationalRetrievalChain):
         llm: BaseLanguageModel,
         vectorstore: VectorStore,
         condense_question_prompt: BasePromptTemplate = CONDENSE_QUESTION_PROMPT,
-        qa_prompt: Optional[BasePromptTemplate] = None,
         chain_type: str = "stuff",
+        combine_docs_chain_kwargs: Optional[Dict] = None,
         **kwargs: Any,
     ) -> BaseConversationalRetrievalChain:
         """Load chain from LLM."""
+        combine_docs_chain_kwargs = combine_docs_chain_kwargs or {}
         doc_chain = load_qa_chain(
             llm,
             chain_type=chain_type,
-            prompt=qa_prompt,
+            **combine_docs_chain_kwargs,
         )
         condense_question_chain = LLMChain(llm=llm, prompt=condense_question_prompt)
         return cls(
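With the same change applied to the deprecated `ChatVectorDBChain`, both constructors now forward arbitrary kwargs to `load_qa_chain`. That makes non-"stuff" chain types configurable; a minimal sketch assuming load_qa_chain's map_reduce loader accepts `question_prompt`/`combine_prompt` (those kwarg names, plus `llm` and `retriever`, are assumptions, not part of this diff):

from langchain.chains import ConversationalRetrievalChain
from langchain.prompts import PromptTemplate

# Hypothetical prompts for a map_reduce combine-docs chain.
question_prompt = PromptTemplate.from_template(
    "Use this portion of the documents to answer.\n{context}\nQuestion: {question}"
)
combine_prompt = PromptTemplate.from_template(
    "Combine these partial answers into one:\n{summaries}\nQuestion: {question}"
)

# combine_docs_chain_kwargs is splatted into load_qa_chain, so chain types other
# than "stuff" can receive their own prompt kwargs.
chain = ConversationalRetrievalChain.from_llm(
    llm,                   # any BaseLanguageModel (assumed to exist)
    retriever=retriever,   # any BaseRetriever (assumed to exist)
    chain_type="map_reduce",
    combine_docs_chain_kwargs={
        "question_prompt": question_prompt,
        "combine_prompt": combine_prompt,
    },
)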