Expose kwargs in LLMChainExtractor.from_llm (#3748)

Re: https://github.com/hwchase17/langchain/issues/3747
Rafal Wojdyla 2023-04-29 05:18:05 +01:00 committed by GitHub
parent 512c24fc9c
commit 57e028549a

@@ -69,9 +69,10 @@ class LLMChainExtractor(BaseDocumentCompressor):
         llm: BaseLanguageModel,
         prompt: Optional[PromptTemplate] = None,
         get_input: Optional[Callable[[str, Document], str]] = None,
+        llm_chain_kwargs: Optional[dict] = None,
     ) -> "LLMChainExtractor":
         """Initialize from LLM."""
         _prompt = prompt if prompt is not None else _get_default_chain_prompt()
         _get_input = get_input if get_input is not None else default_get_input
-        llm_chain = LLMChain(llm=llm, prompt=_prompt)
+        llm_chain = LLMChain(llm=llm, prompt=_prompt, **(llm_chain_kwargs or {}))
         return cls(llm_chain=llm_chain, get_input=_get_input)
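
For reference, a minimal usage sketch of the new parameter. The model choice and the verbose flag are illustrative, not part of this commit; import paths assume the langchain package layout at the time of the change.

# Hypothetical example: llm_chain_kwargs is forwarded verbatim to the
# underlying LLMChain constructor, so any LLMChain field (e.g. verbose)
# can now be set through from_llm.
from langchain.chat_models import ChatOpenAI
from langchain.retrievers.document_compressors import LLMChainExtractor

llm = ChatOpenAI(temperature=0)
compressor = LLMChainExtractor.from_llm(llm, llm_chain_kwargs={"verbose": True})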