agent improvements

This commit is contained in:
Harrison Chase 2022-11-23 07:09:16 -08:00
parent 711a2436bb
commit 93db5470bb
2 changed files with 17 additions and 5 deletions

View File

@@ -26,6 +26,7 @@ class Agent(Chain, BaseModel, ABC):
prompt: ClassVar[BasePromptTemplate]
llm_chain: LLMChain
tools: List[Tool]
retry_on_failed_tool: bool = False
input_key: str = "input"  #: :meta private:
output_key: str = "output"  #: :meta private:
@@ -141,13 +142,21 @@ class Agent(Chain, BaseModel, ABC):
# If the tool chosen is the finishing tool, then we end and return.
if output.tool == self.finish_tool_name:
    return {self.output_key: output.tool_input}
if output.tool not in name_to_tool_map:
    if self.retry_on_failed_tool:
        observation = f"Tool {output.tool} not found."
        color = None
    else:
        # NOTE(review): the committed code raised `KeyError(observation)`, but
        # `observation` is unbound on this branch, so that line would fail with
        # NameError instead of the intended KeyError. Raise the message directly.
        raise KeyError(f"Tool {output.tool} not found.")
else:
# Otherwise we lookup the tool
chain = name_to_tool_map[output.tool]
# We then call the tool on the tool input to get an observation
observation = chain(output.tool_input)
color = color_mapping[output.tool]
# We then log the observation
chained_input.add(f"\n{self.observation_prefix}")
chained_input.add(observation, color=color)
# We then add the LLM prefix into the prompt to get the LLM to start
# thinking, and start the loop all over.
chained_input.add(f"\n{self.llm_prefix}")

View File

@@ -14,6 +14,9 @@ Observation: the result of the action
Thought: I now know the final answer
Final Answer: the final answer to the original input question
Do NOT take the same action with the same action input.
Only take actions with tools that exist.
Begin!
Question: {{input}}"""