Mirror of https://github.com/hwchase17/langchain, synced 2024-11-06 03:20:49 +00:00
Implemented MMR search for PGVector (#10396)

Description: Implemented MMR search for PGVector.
Issue: #7466
Dependencies: None
Tag maintainer:
Twitter handle: @JohnMai95

This commit is contained in:
parent 90504fc499
commit e0d45e6a09
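For readers skimming the diff below, here is a minimal usage sketch of the API this commit adds. It is a sketch only: the connection string, collection name, sample texts, and the choice of OpenAIEmbeddings are placeholders, not values taken from this PR.

    from langchain.embeddings.openai import OpenAIEmbeddings
    from langchain.vectorstores.pgvector import PGVector

    # Placeholder connection details; adjust to your own Postgres + pgvector setup.
    CONNECTION_STRING = "postgresql+psycopg2://postgres:postgres@localhost:5432/postgres"

    db = PGVector.from_texts(
        texts=["doc one", "doc two", "doc three"],
        embedding=OpenAIEmbeddings(),
        collection_name="mmr_demo",
        connection_string=CONNECTION_STRING,
    )

    # New in this commit: MMR retrieval, which balances query relevance against
    # diversity among the returned documents.
    docs = db.max_marginal_relevance_search("doc", k=2, fetch_k=3, lambda_mult=0.5)
    docs_and_scores = db.max_marginal_relevance_search_with_score("doc", k=2)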
@@ -24,42 +24,11 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 60,
+   "execution_count": null,
    "metadata": {
     "tags": []
    },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Requirement already satisfied: pgvector in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (0.1.8)\n",
-      "Requirement already satisfied: numpy in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from pgvector) (1.24.3)\n",
-      "Requirement already satisfied: openai in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (0.27.7)\n",
-      "Requirement already satisfied: requests>=2.20 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from openai) (2.28.2)\n",
-      "Requirement already satisfied: tqdm in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from openai) (4.65.0)\n",
-      "Requirement already satisfied: aiohttp in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from openai) (3.8.4)\n",
-      "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.20->openai) (3.1.0)\n",
-      "Requirement already satisfied: idna<4,>=2.5 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.20->openai) (3.4)\n",
-      "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.20->openai) (1.26.15)\n",
-      "Requirement already satisfied: certifi>=2017.4.17 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.20->openai) (2023.5.7)\n",
-      "Requirement already satisfied: attrs>=17.3.0 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from aiohttp->openai) (23.1.0)\n",
-      "Requirement already satisfied: multidict<7.0,>=4.5 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from aiohttp->openai) (6.0.4)\n",
-      "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from aiohttp->openai) (4.0.2)\n",
-      "Requirement already satisfied: yarl<2.0,>=1.0 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from aiohttp->openai) (1.9.2)\n",
-      "Requirement already satisfied: frozenlist>=1.1.1 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from aiohttp->openai) (1.3.3)\n",
-      "Requirement already satisfied: aiosignal>=1.1.2 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from aiohttp->openai) (1.3.1)\n",
-      "Requirement already satisfied: psycopg2-binary in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (2.9.6)\n",
-      "Requirement already satisfied: tiktoken in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (0.4.0)\n",
-      "Requirement already satisfied: regex>=2022.1.18 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from tiktoken) (2023.5.5)\n",
-      "Requirement already satisfied: requests>=2.26.0 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from tiktoken) (2.28.2)\n",
-      "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.26.0->tiktoken) (3.1.0)\n",
-      "Requirement already satisfied: idna<4,>=2.5 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.26.0->tiktoken) (3.4)\n",
-      "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.26.0->tiktoken) (1.26.15)\n",
-      "Requirement already satisfied: certifi>=2017.4.17 in /Users/joyeed/langchain/langchain/.venv/lib/python3.9/site-packages (from requests>=2.26.0->tiktoken) (2023.5.7)\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "# Pip install necessary package\n",
     "!pip install pgvector\n",
@@ -77,17 +46,14 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "OpenAI API Key:········\n"
-     ]
+   "execution_count": 2,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:02:16.802456Z",
+     "start_time": "2023-09-09T08:02:07.065604Z"
     }
-   ],
+   },
+   "outputs": [],
    "source": [
     "import os\n",
     "import getpass\n",
@@ -97,18 +63,20 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 61,
+   "execution_count": 3,
    "metadata": {
-    "tags": []
+    "tags": [],
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:02:19.742896Z",
+     "start_time": "2023-09-09T08:02:19.732527Z"
+    }
    },
    "outputs": [
    {
     "data": {
-     "text/plain": [
-      "False"
-     ]
+     "text/plain": "False"
     },
-    "execution_count": 61,
+    "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -123,9 +91,13 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 4,
    "metadata": {
-    "tags": []
+    "tags": [],
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:02:23.144824Z",
+     "start_time": "2023-09-09T08:02:22.047801Z"
+    }
    },
    "outputs": [],
    "source": [
@@ -138,8 +110,13 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
+   "execution_count": 5,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:02:25.452472Z",
+     "start_time": "2023-09-09T08:02:25.441563Z"
+    }
+   },
    "outputs": [],
    "source": [
     "loader = TextLoader(\"../../../state_of_the_union.txt\")\n",
@@ -152,8 +129,13 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
+   "execution_count": 6,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:02:28.174088Z",
+     "start_time": "2023-09-09T08:02:28.162698Z"
+    }
+   },
    "outputs": [],
    "source": [
     "# PGVector needs the connection string to the database.\n",
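The hunk above only carries the comment introducing the connection string; the cell contents themselves fall outside this diff. For orientation, a PGVector connection string is a standard SQLAlchemy URL. A minimal sketch with placeholder credentials and hypothetical environment-variable names (none of these values come from this PR):

    import os

    # Placeholder values; point these at your own Postgres instance with the
    # pgvector extension installed.
    CONNECTION_STRING = "postgresql+psycopg2://{user}:{pwd}@{host}:{port}/{db}".format(
        user=os.environ.get("PGVECTOR_USER", "postgres"),
        pwd=os.environ.get("PGVECTOR_PASSWORD", "postgres"),
        host=os.environ.get("PGVECTOR_HOST", "localhost"),
        port=os.environ.get("PGVECTOR_PORT", "5432"),
        db=os.environ.get("PGVECTOR_DATABASE", "postgres"),
    )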
@@ -174,15 +156,22 @@
   },
   {
    "cell_type": "markdown",
-   "metadata": {},
    "source": [
     "## Similarity Search with Euclidean Distance (Default)"
-   ]
+   ],
+   "metadata": {
+    "collapsed": false
+   }
   },
   {
    "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {},
+   "execution_count": 7,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:04:16.696625Z",
+     "start_time": "2023-09-09T08:02:31.817790Z"
+    }
+   },
    "outputs": [],
    "source": [
     "# The PGVector Module will try to create a table with the name of the collection.\n",
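This section of the notebook builds the store and runs a plain similarity search, which is the baseline the new MMR search is compared against later. Since only fragments of these cells appear in the hunk, here is a minimal sketch of that flow; the file path, collection name, and embedding model are placeholders, not values from this diff. The scores printed by similarity_search_with_score are raw distances under the default Euclidean strategy, so lower means closer.

    from langchain.document_loaders import TextLoader
    from langchain.embeddings.openai import OpenAIEmbeddings
    from langchain.text_splitter import CharacterTextSplitter
    from langchain.vectorstores.pgvector import PGVector

    documents = TextLoader("state_of_the_union.txt").load()
    docs = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0).split_documents(documents)

    db = PGVector.from_documents(
        documents=docs,
        embedding=OpenAIEmbeddings(),
        collection_name="state_of_the_union_test",  # placeholder collection name
        connection_string=CONNECTION_STRING,        # as sketched above
    )

    query = "What did the president say about Ketanji Brown Jackson"
    for doc, score in db.similarity_search_with_score(query, k=4):
        print(score, doc.page_content[:80])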
@@ -200,8 +189,13 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 17,
-   "metadata": {},
+   "execution_count": 8,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:05:11.104135Z",
+     "start_time": "2023-09-09T08:05:10.548998Z"
+    }
+   },
    "outputs": [],
    "source": [
     "query = \"What did the president say about Ketanji Brown Jackson\"\n",
@@ -210,15 +204,20 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 18,
-   "metadata": {},
+   "execution_count": 9,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:05:13.532334Z",
+     "start_time": "2023-09-09T08:05:13.523191Z"
+    }
+   },
    "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "--------------------------------------------------------------------------------\n",
-     "Score: 0.18460171628856903\n",
+     "Score: 0.18456886638850434\n",
      "Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \n",
      "\n",
      "Tonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n",
@@ -228,27 +227,7 @@
      "And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence.\n",
      "--------------------------------------------------------------------------------\n",
      "--------------------------------------------------------------------------------\n",
-     "Score: 0.18460171628856903\n",
-     "Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \n",
-     "\n",
-     "Tonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n",
-     "\n",
-     "One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n",
-     "\n",
-     "And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence.\n",
-     "--------------------------------------------------------------------------------\n",
-     "--------------------------------------------------------------------------------\n",
-     "Score: 0.18470284560586236\n",
-     "Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \n",
-     "\n",
-     "Tonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n",
-     "\n",
-     "One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n",
-     "\n",
-     "And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence.\n",
-     "--------------------------------------------------------------------------------\n",
-     "--------------------------------------------------------------------------------\n",
-     "Score: 0.21730864082247825\n",
+     "Score: 0.21742627672631343\n",
      "A former top litigator in private practice. A former federal public defender. And from a family of public school educators and police officers. A consensus builder. Since she’s been nominated, she’s received a broad range of support—from the Fraternal Order of Police to former judges appointed by Democrats and Republicans. \n",
      "\n",
      "And if we are to advance liberty and justice, we need to secure the Border and fix the immigration system. \n",
@@ -260,6 +239,38 @@
      "We’re putting in place dedicated immigration judges so families fleeing persecution and violence can have their cases heard faster. \n",
      "\n",
      "We’re securing commitments and supporting partners in South and Central America to host more refugees and secure their own borders.\n",
+     "--------------------------------------------------------------------------------\n",
+     "--------------------------------------------------------------------------------\n",
+     "Score: 0.22641793174529334\n",
+     "And for our LGBTQ+ Americans, let’s finally get the bipartisan Equality Act to my desk. The onslaught of state laws targeting transgender Americans and their families is wrong. \n",
+     "\n",
+     "As I said last year, especially to our younger transgender Americans, I will always have your back as your President, so you can be yourself and reach your God-given potential. \n",
+     "\n",
+     "While it often appears that we never agree, that isn’t true. I signed 80 bipartisan bills into law last year. From preventing government shutdowns to protecting Asian-Americans from still-too-common hate crimes to reforming military justice. \n",
+     "\n",
+     "And soon, we’ll strengthen the Violence Against Women Act that I first wrote three decades ago. It is important for us to show the nation that we can come together and do big things. \n",
+     "\n",
+     "So tonight I’m offering a Unity Agenda for the Nation. Four big things we can do together. \n",
+     "\n",
+     "First, beat the opioid epidemic.\n",
+     "--------------------------------------------------------------------------------\n",
+     "--------------------------------------------------------------------------------\n",
+     "Score: 0.22670040608054465\n",
+     "Tonight, I’m announcing a crackdown on these companies overcharging American businesses and consumers. \n",
+     "\n",
+     "And as Wall Street firms take over more nursing homes, quality in those homes has gone down and costs have gone up. \n",
+     "\n",
+     "That ends on my watch. \n",
+     "\n",
+     "Medicare is going to set higher standards for nursing homes and make sure your loved ones get the care they deserve and expect. \n",
+     "\n",
+     "We’ll also cut costs and keep the economy going strong by giving workers a fair shot, provide more training and apprenticeships, hire them based on their skills not degrees. \n",
+     "\n",
+     "Let’s pass the Paycheck Fairness Act and paid leave. \n",
+     "\n",
+     "Raise the minimum wage to $15 an hour and extend the Child Tax Credit, so no one has to raise a family in poverty. \n",
+     "\n",
+     "Let’s increase Pell Grants and increase our historic support of HBCUs, and invest in what Jill—our First Lady who teaches full-time—calls America’s best-kept secret: community colleges.\n",
      "--------------------------------------------------------------------------------\n"
     ]
    }
@@ -272,6 +283,131 @@
     "    print(\"-\" * 80)"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "source": [
+    "## Maximal Marginal Relevance Search (MMR)\n",
+    "Maximal marginal relevance optimizes for similarity to query AND diversity among selected documents."
+   ],
+   "metadata": {
+    "collapsed": false
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "outputs": [],
+   "source": [
+    "docs_with_score = db.max_marginal_relevance_search_with_score(query)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:05:23.276819Z",
+     "start_time": "2023-09-09T08:05:21.972256Z"
+    }
+   }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "--------------------------------------------------------------------------------\n",
+      "Score: 0.18453882564037527\n",
+      "Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \n",
+      "\n",
+      "Tonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n",
+      "\n",
+      "One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n",
+      "\n",
+      "And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence.\n",
+      "--------------------------------------------------------------------------------\n",
+      "--------------------------------------------------------------------------------\n",
+      "Score: 0.23523731441720075\n",
+      "We can’t change how divided we’ve been. But we can change how we move forward—on COVID-19 and other issues we must face together. \n",
+      "\n",
+      "I recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n",
+      "\n",
+      "They were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n",
+      "\n",
+      "Officer Mora was 27 years old. \n",
+      "\n",
+      "Officer Rivera was 22. \n",
+      "\n",
+      "Both Dominican Americans who’d grown up on the same streets they later chose to patrol as police officers. \n",
+      "\n",
+      "I spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves. \n",
+      "\n",
+      "I’ve worked on these issues a long time. \n",
+      "\n",
+      "I know what works: Investing in crime preventionand community police officers who’ll walk the beat, who’ll know the neighborhood, and who can restore trust and safety.\n",
+      "--------------------------------------------------------------------------------\n",
+      "--------------------------------------------------------------------------------\n",
+      "Score: 0.2448441215698569\n",
+      "One was stationed at bases and breathing in toxic smoke from “burn pits” that incinerated wastes of war—medical and hazard material, jet fuel, and more. \n",
+      "\n",
+      "When they came home, many of the world’s fittest and best trained warriors were never the same. \n",
+      "\n",
+      "Headaches. Numbness. Dizziness. \n",
+      "\n",
+      "A cancer that would put them in a flag-draped coffin. \n",
+      "\n",
+      "I know. \n",
+      "\n",
+      "One of those soldiers was my son Major Beau Biden. \n",
+      "\n",
+      "We don’t know for sure if a burn pit was the cause of his brain cancer, or the diseases of so many of our troops. \n",
+      "\n",
+      "But I’m committed to finding out everything we can. \n",
+      "\n",
+      "Committed to military families like Danielle Robinson from Ohio. \n",
+      "\n",
+      "The widow of Sergeant First Class Heath Robinson. \n",
+      "\n",
+      "He was born a soldier. Army National Guard. Combat medic in Kosovo and Iraq. \n",
+      "\n",
+      "Stationed near Baghdad, just yards from burn pits the size of football fields. \n",
+      "\n",
+      "Heath’s widow Danielle is here with us tonight. They loved going to Ohio State football games. He loved building Legos with their daughter.\n",
+      "--------------------------------------------------------------------------------\n",
+      "--------------------------------------------------------------------------------\n",
+      "Score: 0.2513994424701056\n",
+      "And I’m taking robust action to make sure the pain of our sanctions is targeted at Russia’s economy. And I will use every tool at our disposal to protect American businesses and consumers. \n",
+      "\n",
+      "Tonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. \n",
+      "\n",
+      "America will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n",
+      "\n",
+      "These steps will help blunt gas prices here at home. And I know the news about what’s happening can seem alarming. \n",
+      "\n",
+      "But I want you to know that we are going to be okay. \n",
+      "\n",
+      "When the history of this era is written Putin’s war on Ukraine will have left Russia weaker and the rest of the world stronger. \n",
+      "\n",
+      "While it shouldn’t have taken something so terrible for people around the world to see what’s at stake now everyone sees it clearly.\n",
+      "--------------------------------------------------------------------------------\n"
+     ]
+    }
+   ],
+   "source": [
+    "for doc, score in docs_with_score:\n",
+    "    print(\"-\" * 80)\n",
+    "    print(\"Score: \", score)\n",
+    "    print(doc.page_content)\n",
+    "    print(\"-\" * 80)"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2023-09-09T08:05:27.478580Z",
+     "start_time": "2023-09-09T08:05:27.470138Z"
+    }
+   }
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
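The markdown cell added above describes MMR as optimizing for similarity to the query AND diversity among the selected documents. As a point of reference, here is a small, self-contained sketch of the greedy MMR selection idea. It is a conceptual illustration only, not the maximal_marginal_relevance helper from langchain.vectorstores.utils that the implementation below actually uses; the cosine similarity and the function name are assumptions of the sketch.

    import numpy as np

    def mmr_select(query_vec: np.ndarray, doc_vecs: np.ndarray, k: int = 4, lambda_mult: float = 0.5) -> list:
        """Greedy MMR sketch: trade query relevance off against redundancy with picks so far."""
        def cosine(a, b):
            return a @ b / (np.linalg.norm(a) * np.linalg.norm(b) + 1e-10)

        relevance = np.array([cosine(query_vec, d) for d in doc_vecs])
        selected: list = []
        while len(selected) < min(k, len(doc_vecs)):
            best_idx, best_score = None, -np.inf
            for i in range(len(doc_vecs)):
                if i in selected:
                    continue
                # Penalize candidates that look like something already selected.
                redundancy = max((cosine(doc_vecs[i], doc_vecs[j]) for j in selected), default=0.0)
                score = lambda_mult * relevance[i] - (1 - lambda_mult) * redundancy
                if score > best_score:
                    best_idx, best_score = i, score
            selected.append(best_idx)
        return selected

    # Example: indices = mmr_select(np.random.rand(8), np.random.rand(20, 8), k=4)

With lambda_mult close to 1.0 the selection reduces to plain nearest neighbours; lowering it increasingly penalizes candidates that resemble documents already picked.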
|
@ -1,9 +1,11 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import contextlib
|
import contextlib
|
||||||
import enum
|
import enum
|
||||||
import logging
|
import logging
|
||||||
import uuid
|
import uuid
|
||||||
|
from functools import partial
|
||||||
from typing import (
|
from typing import (
|
||||||
TYPE_CHECKING,
|
TYPE_CHECKING,
|
||||||
Any,
|
Any,
|
||||||
@@ -17,6 +19,7 @@ from typing import (
     Type,
 )
 
+import numpy as np
 import sqlalchemy
 from sqlalchemy import delete
 from sqlalchemy.dialects.postgresql import UUID
@@ -26,6 +29,7 @@ from langchain.docstore.document import Document
 from langchain.embeddings.base import Embeddings
 from langchain.utils import get_from_dict_or_env
 from langchain.vectorstores.base import VectorStore
+from langchain.vectorstores.utils import maximal_marginal_relevance
 
 if TYPE_CHECKING:
     from langchain.vectorstores._pgvector_data_models import CollectionStore
@@ -54,6 +58,11 @@ class BaseModel(Base):
     uuid = sqlalchemy.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
 
 
+def _results_to_docs(docs_and_scores: Any) -> List[Document]:
+    """Return docs from docs and scores."""
+    return [doc for doc, _ in docs_and_scores]
+
+
 class PGVector(VectorStore):
     """`Postgres`/`PGVector` vector store.
 
@@ -339,7 +348,7 @@ class PGVector(VectorStore):
             filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
 
         Returns:
-            List of Documents most similar to the query and score for each
+            List of Documents most similar to the query and score for each.
         """
         embedding = self.embedding_function.embed_query(query)
         docs = self.similarity_search_with_score_by_vector(
@@ -367,6 +376,31 @@ class PGVector(VectorStore):
         k: int = 4,
         filter: Optional[dict] = None,
     ) -> List[Tuple[Document, float]]:
+        results = self.__query_collection(embedding=embedding, k=k, filter=filter)
+
+        return self._results_to_docs_and_scores(results)
+
+    def _results_to_docs_and_scores(self, results: Any) -> List[Tuple[Document, float]]:
+        """Return docs and scores from results."""
+        docs = [
+            (
+                Document(
+                    page_content=result.EmbeddingStore.document,
+                    metadata=result.EmbeddingStore.cmetadata,
+                ),
+                result.distance if self.embedding_function is not None else None,
+            )
+            for result in results
+        ]
+        return docs
+
+    def __query_collection(
+        self,
+        embedding: List[float],
+        k: int = 4,
+        filter: Optional[Dict[str, str]] = None,
+    ) -> List[Any]:
+        """Query the collection."""
         with Session(self._conn) as session:
             collection = self.get_collection(session)
             if not collection:
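The refactor above splits the old inline logic into __query_collection, which now returns the raw result rows (embeddings included), and _results_to_docs_and_scores, which turns rows into (Document, distance) pairs. That split is what lets the MMR code further down fetch fetch_k candidates through the same path. A minimal usage sketch of the public method refactored here, assuming the db store and the embedding model from the earlier sketches:

    from langchain.embeddings.openai import OpenAIEmbeddings

    # Search with a precomputed query embedding; the (Document, score) pairs come
    # back through _results_to_docs_and_scores, where score is the raw distance.
    query_embedding = OpenAIEmbeddings().embed_query(
        "What did the president say about Ketanji Brown Jackson"
    )
    docs_and_scores = db.similarity_search_with_score_by_vector(
        embedding=query_embedding, k=4
    )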
@@ -410,18 +444,7 @@ class PGVector(VectorStore):
                 .limit(k)
                 .all()
             )
-        docs = [
-            (
-                Document(
-                    page_content=result.EmbeddingStore.document,
-                    metadata=result.EmbeddingStore.cmetadata,
-                ),
-                result.distance if self.embedding_function is not None else None,
-            )
-            for result in results
-        ]
-        return docs
+        return results
 
     def similarity_search_by_vector(
         self,
@@ -443,7 +466,7 @@ class PGVector(VectorStore):
         docs_and_scores = self.similarity_search_with_score_by_vector(
             embedding=embedding, k=k, filter=filter
         )
-        return [doc for doc, _ in docs_and_scores]
+        return _results_to_docs(docs_and_scores)
 
     @classmethod
     def from_texts(
@@ -640,3 +663,190 @@ class PGVector(VectorStore):
             f" for distance_strategy of {self._distance_strategy}."
             "Consider providing relevance_score_fn to PGVector constructor."
         )
+
+    def max_marginal_relevance_search_with_score_by_vector(
+        self,
+        embedding: List[float],
+        k: int = 4,
+        fetch_k: int = 20,
+        lambda_mult: float = 0.5,
+        filter: Optional[Dict[str, str]] = None,
+        **kwargs: Any,
+    ) -> List[Tuple[Document, float]]:
+        """Return docs selected using the maximal marginal relevance with score
+            to embedding vector.
+
+        Maximal marginal relevance optimizes for similarity to query AND diversity
+            among selected documents.
+
+        Args:
+            embedding: Embedding to look up documents similar to.
+            k (int): Number of Documents to return. Defaults to 4.
+            fetch_k (int): Number of Documents to fetch to pass to MMR algorithm.
+                Defaults to 20.
+            lambda_mult (float): Number between 0 and 1 that determines the degree
+                of diversity among the results with 0 corresponding
+                to maximum diversity and 1 to minimum diversity.
+                Defaults to 0.5.
+            filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
+
+        Returns:
+            List[Tuple[Document, float]]: List of Documents selected by maximal marginal
+                relevance to the query and score for each.
+        """
+        results = self.__query_collection(embedding=embedding, k=fetch_k, filter=filter)
+
+        embedding_list = [result.EmbeddingStore.embedding for result in results]
+
+        mmr_selected = maximal_marginal_relevance(
+            np.array(embedding, dtype=np.float32),
+            embedding_list,
+            k=k,
+            lambda_mult=lambda_mult,
+        )
+
+        candidates = self._results_to_docs_and_scores(results)
+
+        return [r for i, r in enumerate(candidates) if i in mmr_selected]
+
+    def max_marginal_relevance_search(
+        self,
+        query: str,
+        k: int = 4,
+        fetch_k: int = 20,
+        lambda_mult: float = 0.5,
+        filter: Optional[Dict[str, str]] = None,
+        **kwargs: Any,
+    ) -> List[Document]:
+        """Return docs selected using the maximal marginal relevance.
+
+        Maximal marginal relevance optimizes for similarity to query AND diversity
+            among selected documents.
+
+        Args:
+            query (str): Text to look up documents similar to.
+            k (int): Number of Documents to return. Defaults to 4.
+            fetch_k (int): Number of Documents to fetch to pass to MMR algorithm.
+                Defaults to 20.
+            lambda_mult (float): Number between 0 and 1 that determines the degree
+                of diversity among the results with 0 corresponding
+                to maximum diversity and 1 to minimum diversity.
+                Defaults to 0.5.
+            filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
+
+        Returns:
+            List[Document]: List of Documents selected by maximal marginal relevance.
+        """
+        embedding = self.embedding_function.embed_query(query)
+        return self.max_marginal_relevance_search_by_vector(
+            embedding,
+            k=k,
+            fetch_k=fetch_k,
+            lambda_mult=lambda_mult,
+            **kwargs,
+        )
+
+    def max_marginal_relevance_search_with_score(
+        self,
+        query: str,
+        k: int = 4,
+        fetch_k: int = 20,
+        lambda_mult: float = 0.5,
+        filter: Optional[dict] = None,
+        **kwargs: Any,
+    ) -> List[Tuple[Document, float]]:
+        """Return docs selected using the maximal marginal relevance with score.
+
+        Maximal marginal relevance optimizes for similarity to query AND diversity
+            among selected documents.
+
+        Args:
+            query (str): Text to look up documents similar to.
+            k (int): Number of Documents to return. Defaults to 4.
+            fetch_k (int): Number of Documents to fetch to pass to MMR algorithm.
+                Defaults to 20.
+            lambda_mult (float): Number between 0 and 1 that determines the degree
+                of diversity among the results with 0 corresponding
+                to maximum diversity and 1 to minimum diversity.
+                Defaults to 0.5.
+            filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
+
+        Returns:
+            List[Tuple[Document, float]]: List of Documents selected by maximal marginal
+                relevance to the query and score for each.
+        """
+        embedding = self.embedding_function.embed_query(query)
+        docs = self.max_marginal_relevance_search_with_score_by_vector(
+            embedding=embedding,
+            k=k,
+            fetch_k=fetch_k,
+            lambda_mult=lambda_mult,
+            filter=filter,
+            **kwargs,
+        )
+        return docs
+
+    def max_marginal_relevance_search_by_vector(
+        self,
+        embedding: List[float],
+        k: int = 4,
+        fetch_k: int = 20,
+        lambda_mult: float = 0.5,
+        filter: Optional[Dict[str, str]] = None,
+        **kwargs: Any,
+    ) -> List[Document]:
+        """Return docs selected using the maximal marginal relevance
+            to embedding vector.
+
+        Maximal marginal relevance optimizes for similarity to query AND diversity
+            among selected documents.
+
+        Args:
+            embedding (str): Text to look up documents similar to.
+            k (int): Number of Documents to return. Defaults to 4.
+            fetch_k (int): Number of Documents to fetch to pass to MMR algorithm.
+                Defaults to 20.
+            lambda_mult (float): Number between 0 and 1 that determines the degree
+                of diversity among the results with 0 corresponding
+                to maximum diversity and 1 to minimum diversity.
+                Defaults to 0.5.
+            filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
+
+        Returns:
+            List[Document]: List of Documents selected by maximal marginal relevance.
+        """
+        docs_and_scores = self.max_marginal_relevance_search_with_score_by_vector(
+            embedding,
+            k=k,
+            fetch_k=fetch_k,
+            lambda_mult=lambda_mult,
+            filter=filter,
+            **kwargs,
+        )
+
+        return _results_to_docs(docs_and_scores)
+
+    async def amax_marginal_relevance_search_by_vector(
+        self,
+        embedding: List[float],
+        k: int = 4,
+        fetch_k: int = 20,
+        lambda_mult: float = 0.5,
+        filter: Optional[Dict[str, str]] = None,
+        **kwargs: Any,
+    ) -> List[Document]:
+        """Return docs selected using the maximal marginal relevance."""
+
+        # This is a temporary workaround to make the similarity search
+        # asynchronous. The proper solution is to make the similarity search
+        # asynchronous in the vector store implementations.
+        func = partial(
+            self.max_marginal_relevance_search_by_vector,
+            embedding,
+            k=k,
+            fetch_k=fetch_k,
+            lambda_mult=lambda_mult,
+            filter=filter,
+            **kwargs,
+        )
+        return await asyncio.get_event_loop().run_in_executor(None, func)
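The async variant added at the end simply pushes the synchronous MMR search onto the default executor, as the comment in the diff notes. A minimal calling sketch, assuming db is a PGVector store as in the earlier sketches and OpenAIEmbeddings is just a stand-in embedding model:

    import asyncio

    from langchain.embeddings.openai import OpenAIEmbeddings

    async def diverse_docs(db, query: str):
        # Embed the query ourselves, then await the new async MMR-by-vector method.
        embedding = OpenAIEmbeddings().embed_query(query)
        return await db.amax_marginal_relevance_search_by_vector(
            embedding, k=4, fetch_k=20, lambda_mult=0.5
        )

    # docs = asyncio.run(diverse_docs(db, "What did the president say about Ketanji Brown Jackson"))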
@@ -279,3 +279,31 @@ def test_pgvector_retriever_search_threshold_custom_normalization_fn() -> None:
     )
     output = retriever.get_relevant_documents("foo")
     assert output == []
+
+
+def test_pgvector_max_marginal_relevance_search() -> None:
+    """Test max marginal relevance search."""
+    texts = ["foo", "bar", "baz"]
+    docsearch = PGVector.from_texts(
+        texts=texts,
+        collection_name="test_collection",
+        embedding=FakeEmbeddingsWithAdaDimension(),
+        connection_string=CONNECTION_STRING,
+        pre_delete_collection=True,
+    )
+    output = docsearch.max_marginal_relevance_search("foo", k=1, fetch_k=3)
+    assert output == [Document(page_content="foo")]
+
+
+def test_pgvector_max_marginal_relevance_search_with_score() -> None:
+    """Test max marginal relevance search with relevance scores."""
+    texts = ["foo", "bar", "baz"]
+    docsearch = PGVector.from_texts(
+        texts=texts,
+        collection_name="test_collection",
+        embedding=FakeEmbeddingsWithAdaDimension(),
+        connection_string=CONNECTION_STRING,
+        pre_delete_collection=True,
+    )
+    output = docsearch.max_marginal_relevance_search_with_score("foo", k=1, fetch_k=3)
+    assert output == [(Document(page_content="foo"), 0.0)]
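Both new integration tests go through the query-text entry points. A hypothetical further check one could add is sketched below; it is not part of this PR, and it assumes the module's existing fixtures (FakeEmbeddingsWithAdaDimension, CONNECTION_STRING) behave as in the tests above:

    def test_pgvector_max_marginal_relevance_search_by_vector() -> None:
        """Hypothetical sketch: MMR by a precomputed vector returns the closest text."""
        texts = ["foo", "bar", "baz"]
        docsearch = PGVector.from_texts(
            texts=texts,
            collection_name="test_collection",
            embedding=FakeEmbeddingsWithAdaDimension(),
            connection_string=CONNECTION_STRING,
            pre_delete_collection=True,
        )
        embedding = FakeEmbeddingsWithAdaDimension().embed_query("foo")
        output = docsearch.max_marginal_relevance_search_by_vector(embedding, k=1, fetch_k=3)
        assert output == [Document(page_content="foo")]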