fix prompt saving (#4987)

will add unit tests
Davis Chase 1 year ago committed by GitHub
parent 27e63b977a
commit 3bc0bf0079

@@ -559,7 +559,7 @@
},
{
"cell_type": "code",
"execution_count": 18,
"execution_count": 1,
"id": "0b6dd7b8",
"metadata": {},
"outputs": [
@@ -631,6 +631,84 @@
"prompt = load_prompt(\"few_shot_prompt_example_prompt.json\")\n",
"print(prompt.format(adjective=\"funny\"))"
]
},
{
"cell_type": "markdown",
"id": "c6e3f9fe",
"metadata": {},
"source": [
"## PromptTempalte with OutputParser\n",
"This shows an example of loading a prompt along with an OutputParser from a file."
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "500dab26",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{\r\n",
" \"input_variables\": [\r\n",
" \"question\",\r\n",
" \"student_answer\"\r\n",
" ],\r\n",
" \"output_parser\": {\r\n",
" \"regex\": \"(.*?)\\\\nScore: (.*)\",\r\n",
" \"output_keys\": [\r\n",
" \"answer\",\r\n",
" \"score\"\r\n",
" ],\r\n",
" \"default_output_key\": null,\r\n",
" \"_type\": \"regex_parser\"\r\n",
" },\r\n",
" \"partial_variables\": {},\r\n",
" \"template\": \"Given the following question and student answer, provide a correct answer and score the student answer.\\nQuestion: {question}\\nStudent Answer: {student_answer}\\nCorrect Answer:\",\r\n",
" \"template_format\": \"f-string\",\r\n",
" \"validate_template\": true,\r\n",
" \"_type\": \"prompt\"\r\n",
"}"
]
}
],
"source": [
"! cat prompt_with_output_parser.json"
]
},
{
"cell_type": "code",
"execution_count": 20,
"id": "d267a736",
"metadata": {},
"outputs": [],
"source": [
"prompt = load_prompt(\"prompt_with_output_parser.json\")"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "cb770399",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'answer': 'George Washington was born in 1732 and died in 1799.',\n",
" 'score': '1/2'}"
]
},
"execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"prompt.output_parser.parse(\"George Washington was born in 1732 and died in 1799.\\nScore: 1/2\")"
]
}
],
"metadata": {
@@ -649,7 +727,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.1"
"version": "3.11.3"
},
"vscode": {
"interpreter": {

@@ -0,0 +1,20 @@
{
"input_variables": [
"question",
"student_answer"
],
"output_parser": {
"regex": "(.*?)\nScore: (.*)",
"output_keys": [
"answer",
"score"
],
"default_output_key": null,
"_type": "regex_parser"
},
"partial_variables": {},
"template": "Given the following question and student answer, provide a correct answer and score the student answer.\nQuestion: {question}\nStudent Answer: {student_answer}\nCorrect Answer:",
"template_format": "f-string",
"validate_template": true,
"_type": "prompt"
}
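For reference, a file with this shape is what prompt serialization is meant to produce once an output parser is attached to a prompt. Below is a minimal sketch of how such a file could be generated, assuming the classic langchain prompt API (PromptTemplate, RegexParser, and the save method on prompt templates); the file name and template text simply mirror the example above.

from langchain.output_parsers import RegexParser
from langchain.prompts import PromptTemplate

# Output parser that splits a completion into an answer and a score
# using two regex capture groups.
parser = RegexParser(
    regex="(.*?)\nScore: (.*)",
    output_keys=["answer", "score"],
)

prompt = PromptTemplate(
    input_variables=["question", "student_answer"],
    template=(
        "Given the following question and student answer, provide a "
        "correct answer and score the student answer.\n"
        "Question: {question}\nStudent Answer: {student_answer}\nCorrect Answer:"
    ),
    output_parser=parser,
)

# Serialize the prompt, including its output parser, to JSON.
prompt.save("prompt_with_output_parser.json")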

@@ -74,15 +74,14 @@ def _load_examples(config: dict) -> dict:
 def _load_output_parser(config: dict) -> dict:
     """Load output parser."""
-    if "output_parsers" in config:
-        if config["output_parsers"] is not None:
-            _config = config["output_parsers"]
-            output_parser_type = _config["_type"]
-            if output_parser_type == "regex_parser":
-                output_parser = RegexParser(**_config)
-            else:
-                raise ValueError(f"Unsupported output parser {output_parser_type}")
-            config["output_parsers"] = output_parser
+    if "output_parser" in config and config["output_parser"]:
+        _config = config.pop("output_parser")
+        output_parser_type = _config.pop("_type")
+        if output_parser_type == "regex_parser":
+            output_parser = RegexParser(**_config)
+        else:
+            raise ValueError(f"Unsupported output parser {output_parser_type}")
+        config["output_parser"] = output_parser
     return config
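A minimal sketch of what the fixed helper does to a deserialized config dict; _load_output_parser is the private function changed above, and the literal values below just mirror the JSON fixture added in this commit.

from langchain.output_parsers import RegexParser
from langchain.prompts.loading import _load_output_parser

# Config as it looks right after json.load() on the saved prompt file.
config = {
    "input_variables": ["question", "student_answer"],
    "output_parser": {
        "regex": "(.*?)\nScore: (.*)",
        "output_keys": ["answer", "score"],
        "default_output_key": None,
        "_type": "regex_parser",
    },
}

# The fixed loader pops the raw dict, strips the "_type" discriminator,
# and stores a real RegexParser back under the same "output_parser" key.
config = _load_output_parser(config)
assert isinstance(config["output_parser"], RegexParser)

The previous code looked up "output_parsers" (plural), so a parser saved under the singular "output_parser" key was never converted back into a parser object.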

@@ -5,6 +5,7 @@ from contextlib import contextmanager
from pathlib import Path
from typing import Iterator
from langchain.output_parsers import RegexParser
from langchain.prompts.few_shot import FewShotPromptTemplate
from langchain.prompts.loading import load_prompt
from langchain.prompts.prompt import PromptTemplate
@@ -160,3 +161,24 @@ def test_loading_few_shot_prompt_example_prompt() -> None:
suffix="Input: {adjective}\nOutput:",
)
assert prompt == expected_prompt


def test_loading_with_output_parser() -> None:
    with change_directory():
        prompt = load_prompt("prompt_with_output_parser.json")
        expected_template = """\
Given the following question and student answer, \
provide a correct answer and score the student answer.
Question: {question}
Student Answer: {student_answer}
Correct Answer:\
"""
        expected_prompt = PromptTemplate(
            input_variables=["question", "student_answer"],
            output_parser=RegexParser(
                regex="(.*?)\nScore: (.*)",
                output_keys=["answer", "score"],
            ),
            template=expected_template,
        )
        assert prompt == expected_prompt
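The new test covers loading only; the save-then-load round trip that the PR title refers to can be exercised along the following lines. This is a hedged sketch, not part of the diff: the helper name and the temporary directory argument are hypothetical, while PromptTemplate, RegexParser, load_prompt, and save are the same APIs used above.

from pathlib import Path

from langchain.output_parsers import RegexParser
from langchain.prompts.loading import load_prompt
from langchain.prompts.prompt import PromptTemplate


def roundtrip_prompt_with_output_parser(tmp_dir: Path) -> None:
    # Hypothetical helper: save a prompt with an output parser and check
    # that loading it back yields an equal object.
    original = PromptTemplate(
        input_variables=["question", "student_answer"],
        template="Question: {question}\nStudent Answer: {student_answer}\nCorrect Answer:",
        output_parser=RegexParser(
            regex="(.*?)\nScore: (.*)",
            output_keys=["answer", "score"],
        ),
    )
    path = tmp_dir / "roundtrip_prompt.json"
    original.save(path)
    assert load_prompt(path) == original

Before this change the loader did not reconstruct the output_parser entry from the saved file, which is what the fix above addresses.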
