From 5aefc2b7ce7cdec19019906ef3a82bc09ed9ad31 Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Wed, 4 Jan 2023 20:23:55 -0800
Subject: [PATCH] add handling on error (#541)

---
 langchain/agents/agent.py | 11 ++++++++---
 langchain/chains/base.py  |  7 ++++++-
 langchain/llms/base.py    | 24 ++++++++++++++++++------
 3 files changed, 32 insertions(+), 10 deletions(-)

diff --git a/langchain/agents/agent.py b/langchain/agents/agent.py
index fd96f64b..dc669176 100644
--- a/langchain/agents/agent.py
+++ b/langchain/agents/agent.py
@@ -272,9 +272,14 @@ class AgentExecutor(Chain, BaseModel):
                 self.callback_manager.on_tool_start(
                     {"name": str(chain)[:60] + "..."}, output, color="green"
                 )
-            # We then call the tool on the tool input to get an observation
-            observation = chain(output.tool_input)
-            color = color_mapping[output.tool]
+            try:
+                # We then call the tool on the tool input to get an observation
+                observation = chain(output.tool_input)
+                color = color_mapping[output.tool]
+            except Exception as e:
+                if self.verbose:
+                    self.callback_manager.on_tool_error(e)
+                raise e
         else:
             if self.verbose:
                 self.callback_manager.on_tool_start(
diff --git a/langchain/chains/base.py b/langchain/chains/base.py
index af2e6cc1..83c1ea9f 100644
--- a/langchain/chains/base.py
+++ b/langchain/chains/base.py
@@ -138,7 +138,12 @@ class Chain(BaseModel, ABC):
             self.callback_manager.on_chain_start(
                 {"name": self.__class__.__name__}, inputs
             )
-        outputs = self._call(inputs)
+        try:
+            outputs = self._call(inputs)
+        except Exception as e:
+            if self.verbose:
+                self.callback_manager.on_chain_error(e)
+            raise e
         if self.verbose:
             self.callback_manager.on_chain_end(outputs)
         self._validate_outputs(outputs)
diff --git a/langchain/llms/base.py b/langchain/llms/base.py
index 90452ed5..b8d05ccd 100644
--- a/langchain/llms/base.py
+++ b/langchain/llms/base.py
@@ -73,7 +73,12 @@ class BaseLLM(BaseModel, ABC):
             self.callback_manager.on_llm_start(
                 {"name": self.__class__.__name__}, prompts
             )
-        output = self._generate(prompts, stop=stop)
+        try:
+            output = self._generate(prompts, stop=stop)
+        except Exception as e:
+            if self.verbose:
+                self.callback_manager.on_llm_error(e)
+            raise e
         if self.verbose:
             self.callback_manager.on_llm_end(output)
         return output
@@ -90,11 +95,18 @@ class BaseLLM(BaseModel, ABC):
             else:
                 missing_prompts.append(prompt)
                 missing_prompt_idxs.append(i)
-        self.callback_manager.on_llm_start(
-            {"name": self.__class__.__name__}, missing_prompts
-        )
-        new_results = self._generate(missing_prompts, stop=stop)
-        self.callback_manager.on_llm_end(new_results)
+        if self.verbose:
+            self.callback_manager.on_llm_start(
+                {"name": self.__class__.__name__}, missing_prompts
+            )
+        try:
+            new_results = self._generate(missing_prompts, stop=stop)
+        except Exception as e:
+            if self.verbose:
+                self.callback_manager.on_llm_error(e)
+            raise e
+        if self.verbose:
+            self.callback_manager.on_llm_end(new_results)
         for i, result in enumerate(new_results.generations):
             existing_prompts[missing_prompt_idxs[i]] = result
             prompt = prompts[i]