Output parsing variation allowance (#5178)

# Output parsing variation allowance for self-ask with search

This change makes the self-ask-with-search output parser easier for Llama models to
satisfy: they tend to return 'Followup:' instead of 'Follow up:' even though the
rest of the output is otherwise valid.
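For illustration, a minimal sketch of the variation in question; the question text below is invented for the example:

```python
# Hypothetical model outputs -- only the follow-up prefix differs.
standard_output = "Yes.\nFollow up: How old was Muhammad Ali when he died?"
llama_output = "Yes.\nFollowup: How old was Muhammad Ali when he died?"
# Previously only the first string parsed; with this change both prefixes are accepted.
```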


Co-authored-by: Dev 2049 <dev.dev2049@gmail.com>
Alexander Dibrov, commit d8eed6018f (parent c173bf1c62), committed by GitHub

```diff
@@ -1,24 +1,21 @@
-from typing import Union
+from typing import Sequence, Union
 
 from langchain.agents.agent import AgentOutputParser
 from langchain.schema import AgentAction, AgentFinish, OutputParserException
 
 
 class SelfAskOutputParser(AgentOutputParser):
+    followups: Sequence[str] = ("Follow up:", "Followup:")
+    finish_string: str = "So the final answer is: "
+
     def parse(self, text: str) -> Union[AgentAction, AgentFinish]:
-        followup = "Follow up:"
         last_line = text.split("\n")[-1]
-        if followup not in last_line:
-            finish_string = "So the final answer is: "
-            if finish_string not in last_line:
+        if not any([follow in last_line for follow in self.followups]):
+            if self.finish_string not in last_line:
                 raise OutputParserException(f"Could not parse output: {text}")
-            return AgentFinish({"output": last_line[len(finish_string) :]}, text)
+            return AgentFinish({"output": last_line[len(self.finish_string) :]}, text)
 
-        after_colon = text.split(":")[-1]
-        if " " == after_colon[0]:
-            after_colon = after_colon[1:]
+        after_colon = text.split(":")[-1].strip()
         return AgentAction("Intermediate Answer", after_colon, text)
 
     @property
```
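A minimal usage sketch of the relaxed parser, assuming the class is imported from its module under `langchain.agents.self_ask_with_search`; the model output strings are invented for illustration:

```python
from langchain.agents.self_ask_with_search.output_parser import SelfAskOutputParser

parser = SelfAskOutputParser()

# Llama-style output: "Followup:" without the space now parses as an AgentAction.
action = parser.parse("Yes.\nFollowup: How old was Muhammad Ali when he died?")
print(action.tool, "->", action.tool_input)
# Intermediate Answer -> How old was Muhammad Ali when he died?

# The finishing phrase still yields an AgentFinish.
finish = parser.parse("So the final answer is: Muhammad Ali")
print(finish.return_values["output"])
# Muhammad Ali
```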
