From 0b6993987f7b91591325d4e3d0a4e55f1cbda9ba Mon Sep 17 00:00:00 2001
From: Sajal Sharma
Date: Mon, 4 Sep 2023 11:42:20 +0800
Subject: [PATCH] feature: add verbosity to create_qa_with_sources_chain
 (#9742)

Adds a verbose parameter to the create_qa_with_sources_chain and
create_qa_with_structure_chain functions.
---
 .../chains/openai_functions/qa_with_structure.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py b/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py
index dff17030a3..770b3e9d76 100644
--- a/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py
+++ b/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py
@@ -28,6 +28,7 @@ def create_qa_with_structure_chain(
     schema: Union[dict, Type[BaseModel]],
     output_parser: str = "base",
     prompt: Optional[Union[PromptTemplate, ChatPromptTemplate]] = None,
+    verbose: bool = False,
 ) -> LLMChain:
     """Create a question answering chain that returns an answer with
     sources based on schema.
@@ -87,18 +88,24 @@ def create_qa_with_structure_chain(
         prompt=prompt,
         llm_kwargs=llm_kwargs,
         output_parser=_output_parser,
+        verbose=verbose,
     )
     return chain
 
 
-def create_qa_with_sources_chain(llm: BaseLanguageModel, **kwargs: Any) -> LLMChain:
+def create_qa_with_sources_chain(
+    llm: BaseLanguageModel, verbose: bool = False, **kwargs: Any
+) -> LLMChain:
     """Create a question answering chain that returns an answer with sources.
 
     Args:
         llm: Language model to use for the chain.
+        verbose: Whether to print the details of the chain.
         **kwargs: Keyword arguments to pass to `create_qa_with_structure_chain`.
 
     Returns:
         Chain (LLMChain) that can be used to answer questions with citations.
     """
-    return create_qa_with_structure_chain(llm, AnswerWithSources, **kwargs)
+    return create_qa_with_structure_chain(
+        llm, AnswerWithSources, verbose=verbose, **kwargs
+    )
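
Usage sketch (not part of the patch): a minimal example of the new verbose flag,
assuming an OpenAI chat model and an inline context string. The model settings,
question, and context below are illustrative only.

    from langchain.chains.openai_functions import create_qa_with_sources_chain
    from langchain.chat_models import ChatOpenAI

    # verbose=True makes the resulting LLMChain print its formatted prompts
    # and execution details as it runs.
    llm = ChatOpenAI(temperature=0)
    chain = create_qa_with_sources_chain(llm, verbose=True)

    answer = chain.run(
        question="What does the context say about verbosity?",
        context="Source 1: verbosity can now be toggled per chain.",
    )
    print(answer)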