forked from Archives/langchain
fix typing for LLMMathChain (#2183)
Fix typing in LLMMathChain to allow chat models (#1834). This may have been missed in the related PR #1807.
This commit is contained in:
parent 3207a74829
commit fd1fcb5a7d
@@ -6,9 +6,9 @@ from pydantic import BaseModel, Extra
 from langchain.chains.base import Chain
 from langchain.chains.llm import LLMChain
 from langchain.chains.llm_math.prompt import PROMPT
-from langchain.llms.base import BaseLLM
 from langchain.prompts.base import BasePromptTemplate
 from langchain.python import PythonREPL
+from langchain.schema import BaseLanguageModel


 class LLMMathChain(Chain, BaseModel):
@@ -21,7 +21,7 @@ class LLMMathChain(Chain, BaseModel):
            llm_math = LLMMathChain(llm=OpenAI())
    """

-    llm: BaseLLM
+    llm: BaseLanguageModel
    """LLM wrapper to use."""
    prompt: BasePromptTemplate = PROMPT
    """Prompt to use to translate to python if neccessary."""
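The diff only relaxes the type annotation on the llm field, so a minimal usage sketch of what it enables (not part of the commit; it assumes langchain's ChatOpenAI chat-model wrapper and a configured OpenAI API key, and the question string is illustrative):

# Sketch: with `llm` typed as BaseLanguageModel, a chat model now satisfies
# the annotation where previously only BaseLLM subclasses did.
from langchain.chains import LLMMathChain
from langchain.chat_models import ChatOpenAI  # assumed available; any BaseLanguageModel works

llm_math = LLMMathChain(llm=ChatOpenAI(temperature=0))  # rejected by type checks when the field was `llm: BaseLLM`
print(llm_math.run("What is 13 raised to the 0.3432 power?"))

Plain LLMs keep working unchanged, e.g. LLMMathChain(llm=OpenAI()) as shown in the class docstring above.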