Harrison/version 0032 (#283)

harrison/promot-mrkl
Harrison Chase 2 years ago committed by GitHub
parent e9b1c8cdfa
commit e2e501aa06
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -51,7 +51,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 3,
"id": "5ec2b55b",
"metadata": {},
"outputs": [],
@ -61,7 +61,7 @@
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": 4,
"id": "5286f58f",
"metadata": {},
"outputs": [],
@ -72,7 +72,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 5,
"id": "005a47e9",
"metadata": {},
"outputs": [],
@ -93,7 +93,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 6,
"id": "fc1a5ed6",
"metadata": {},
"outputs": [],
@ -103,7 +103,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 7,
"id": "e239964b",
"metadata": {},
"outputs": [],
@ -187,7 +187,7 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 11,
"id": "904835c8",
"metadata": {},
"outputs": [],
@ -197,24 +197,24 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": 13,
"id": "f60875c6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'output_text': \"\\n\\nThe president said that Justice Breyer has dedicated his life to serve the country and has left a legacy of excellence. He also thanked Justice Breyer for his service and for his commitment to advancing liberty and justice, including protecting the rights of women and the constitutional right affirmed in Roe v. Wade, preserving access to health care and a woman's right to choose, and advancing the bipartisan Equality Act to protect LGBTQ+ Americans. The president also noted that the State of the Union is strong because of the courage and determination of the American people, and that the nation will meet and overcome the challenges of our time as one people, just as the Ukrainian people have done in the face of adversity. Source: 0, 29, 35\"}"
"{'output_text': \"\\n\\nThe president did not mention Justice Breyer in his speech to the European Parliament, which focused on building a coalition of freedom-loving nations to confront Putin, unifying European allies, countering Russia's lies with truth, and enforcing powerful economic sanctions. Source: 2\"}"
]
},
"execution_count": 15,
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"query = \"What did the president say about Justice Breyer\"\n",
"chain({\"input_documents\": docs, \"query_str\": query}, return_only_outputs=True)"
"chain({\"input_documents\": docs, \"question\": query}, return_only_outputs=True)"
]
},
{

@ -185,7 +185,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 11,
"id": "fb167057",
"metadata": {},
"outputs": [],
@ -195,24 +195,24 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 13,
"id": "d8b5286e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'output_text': '\\n\\nThe president did not mention Justice Breyer in the given page content.'}"
"{'output_text': \"\\n\\nThe president did not mention Justice Breyer in his speech to the European Parliament about building a coalition of freedom-loving nations to confront Putin, unifying European allies, countering Russia's lies with truth, and enforcing powerful economic sanctions.\"}"
]
},
"execution_count": 10,
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"query = \"What did the president say about Justice Breyer\"\n",
"chain({\"input_documents\": docs, \"query_str\": query}, return_only_outputs=True)"
"chain({\"input_documents\": docs, \"question\": query}, return_only_outputs=True)"
]
},
{

@ -5,7 +5,7 @@
"id": "efc5be67",
"metadata": {},
"source": [
"# Question-Answering with Sources\n",
"# VectorDB Question Answering with Sources\n",
"\n",
"This notebook goes over how to do question-answering with sources. It does this in a few different ways - first showing how you can use the `QAWithSourcesChain` to take in documents and use those, and next showing the `VectorDBQAWithSourcesChain`, which also does the lookup of the documents from a vector database. "
]
@ -138,7 +138,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.1"
"version": "3.10.8"
}
},
"nbformat": 4,

@ -2,7 +2,7 @@
from langchain.prompts import PromptTemplate
DEFAULT_REFINE_PROMPT_TMPL = (
"The original question is as follows: {query_str}\n"
"The original question is as follows: {question}\n"
"We have provided an existing answer, including sources: {existing_answer}\n"
"We have the opportunity to refine the existing answer"
"(only if needed) with some more context below.\n"
@ -15,7 +15,7 @@ DEFAULT_REFINE_PROMPT_TMPL = (
"If the context isn't useful, return the original answer."
)
DEFAULT_REFINE_PROMPT = PromptTemplate(
input_variables=["query_str", "existing_answer", "context_str"],
input_variables=["question", "existing_answer", "context_str"],
template=DEFAULT_REFINE_PROMPT_TMPL,
)
@ -26,10 +26,10 @@ DEFAULT_TEXT_QA_PROMPT_TMPL = (
"{context_str}"
"\n---------------------\n"
"Given the context information and not prior knowledge, "
"answer the question: {query_str}\n"
"answer the question: {question}\n"
)
DEFAULT_TEXT_QA_PROMPT = PromptTemplate(
input_variables=["context_str", "query_str"], template=DEFAULT_TEXT_QA_PROMPT_TMPL
input_variables=["context_str", "question"], template=DEFAULT_TEXT_QA_PROMPT_TMPL
)
EXAMPLE_PROMPT = PromptTemplate(

@ -2,7 +2,7 @@
from langchain.prompts import PromptTemplate
DEFAULT_REFINE_PROMPT_TMPL = (
"The original question is as follows: {query_str}\n"
"The original question is as follows: {question}\n"
"We have provided an existing answer: {existing_answer}\n"
"We have the opportunity to refine the existing answer"
"(only if needed) with some more context below.\n"
@ -14,7 +14,7 @@ DEFAULT_REFINE_PROMPT_TMPL = (
"If the context isn't useful, return the original answer."
)
DEFAULT_REFINE_PROMPT = PromptTemplate(
input_variables=["query_str", "existing_answer", "context_str"],
input_variables=["question", "existing_answer", "context_str"],
template=DEFAULT_REFINE_PROMPT_TMPL,
)
@ -25,8 +25,8 @@ DEFAULT_TEXT_QA_PROMPT_TMPL = (
"{context_str}"
"\n---------------------\n"
"Given the context information and not prior knowledge, "
"answer the question: {query_str}\n"
"answer the question: {question}\n"
)
DEFAULT_TEXT_QA_PROMPT = PromptTemplate(
input_variables=["context_str", "query_str"], template=DEFAULT_TEXT_QA_PROMPT_TMPL
input_variables=["context_str", "question"], template=DEFAULT_TEXT_QA_PROMPT_TMPL
)

@ -1,6 +1,6 @@
[tool.poetry]
name = "langchain"
version = "0.0.31"
version = "0.0.32"
description = "Building applications with LLMs through composability"
authors = []
license = "MIT"

Loading…
Cancel
Save