From 7658263bfbc9485ebbc85b7d4c2476ea68611e26 Mon Sep 17 00:00:00 2001 From: Bagatur <22008038+baskaryan@users.noreply.github.com> Date: Sat, 4 Feb 2023 22:49:17 -0800 Subject: [PATCH] Check type of LLM.generate `prompts` arg (#886) I was passing the prompt in directly as a string and getting nonsense outputs; I had to inspect the source code to realize that the first argument should be a list. It would be nice if there were an explicit error or warning, since this seems like it could be a common mistake. --- langchain/llms/base.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/langchain/llms/base.py b/langchain/llms/base.py index 06c57153..269cdd77 100644 --- a/langchain/llms/base.py +++ b/langchain/llms/base.py @@ -62,6 +62,13 @@ class BaseLLM(BaseModel, ABC): self, prompts: List[str], stop: Optional[List[str]] = None ) -> LLMResult: """Run the LLM on the given prompt and input.""" + # If string is passed in directly no errors will be raised but outputs will + # not make sense. + if not isinstance(prompts, list): + raise ValueError( + "Argument 'prompts' is expected to be of type List[str], received" + f" argument of type {type(prompts)}." + ) disregard_cache = self.cache is not None and not self.cache if langchain.llm_cache is None or disregard_cache: # This happens when langchain.cache is None, but self.cache is True