2022-11-02 04:29:39 +00:00
|
|
|
"""Test FAISS functionality."""
|
2023-06-07 05:07:27 +00:00
|
|
|
import datetime
|
2023-04-16 04:06:08 +00:00
|
|
|
import math
|
2023-01-22 00:08:14 +00:00
|
|
|
import tempfile
|
2022-11-02 04:29:39 +00:00
|
|
|
|
|
|
|
import pytest
|
|
|
|
|
|
|
|
from langchain.docstore.document import Document
|
|
|
|
from langchain.docstore.in_memory import InMemoryDocstore
|
2022-11-21 00:23:58 +00:00
|
|
|
from langchain.docstore.wikipedia import Wikipedia
|
2022-11-09 21:26:58 +00:00
|
|
|
from langchain.vectorstores.faiss import FAISS
|
2023-02-03 06:05:47 +00:00
|
|
|
from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings
|
2022-11-02 04:29:39 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_faiss() -> None:
    """Test end to end construction and search."""
    contents = ["foo", "bar", "baz"]
    docsearch = FAISS.from_texts(contents, FakeEmbeddings())
    id_map = docsearch.index_to_docstore_id
    # The docstore should hold exactly one Document per input text,
    # keyed by the ids recorded in the index-to-docstore mapping.
    expected_docstore = InMemoryDocstore(
        {id_map[pos]: Document(page_content=text) for pos, text in enumerate(contents)}
    )
    assert docsearch.docstore.__dict__ == expected_docstore.__dict__
    results = docsearch.similarity_search("foo", k=1)
    assert results == [Document(page_content="foo")]
|
|
|
|
|
|
|
|
|
2023-02-16 06:50:00 +00:00
|
|
|
def test_faiss_vector_sim() -> None:
    """Test vector similarity."""
    corpus = ["foo", "bar", "baz"]
    docsearch = FAISS.from_texts(corpus, FakeEmbeddings())
    id_map = docsearch.index_to_docstore_id
    # Expected docstore mirrors the inputs, keyed by the generated ids.
    expected_docstore = InMemoryDocstore(
        {id_map[pos]: Document(page_content=text) for pos, text in enumerate(corpus)}
    )
    assert docsearch.docstore.__dict__ == expected_docstore.__dict__
    # Search with a raw embedding vector rather than a query string.
    embedded_query = FakeEmbeddings().embed_query(text="foo")
    results = docsearch.similarity_search_by_vector(embedded_query, k=1)
    assert results == [Document(page_content="foo")]
|
|
|
|
|
2023-06-29 05:00:34 +00:00
|
|
|
|
|
|
|
def test_faiss_mmr() -> None:
    """Test max marginal relevance search (with scores, by vector)."""
    corpus = ["foo", "foo", "fou", "foy"]
    docsearch = FAISS.from_texts(corpus, FakeEmbeddings())
    embedded_query = FakeEmbeddings().embed_query(text="foo")
    # make sure we can have k > docstore size
    results = docsearch.max_marginal_relevance_search_with_score_by_vector(
        embedded_query, k=10, lambda_mult=0.1
    )
    assert len(results) == len(corpus)
    top_doc, top_score = results[0]
    # The exact match comes first with a zero distance score.
    assert top_doc == Document(page_content="foo")
    assert top_score == 0.0
    # With a low lambda_mult, the second hit is diversified away from "foo".
    assert results[1][0] != Document(page_content="foo")
|
2023-02-21 00:39:13 +00:00
|
|
|
|
2023-02-16 06:50:00 +00:00
|
|
|
|
2022-11-20 04:32:45 +00:00
|
|
|
def test_faiss_with_metadatas() -> None:
    """Test end to end construction and search."""
    corpus = ["foo", "bar", "baz"]
    metadatas = [{"page": pos} for pos in range(len(corpus))]
    docsearch = FAISS.from_texts(corpus, FakeEmbeddings(), metadatas=metadatas)
    # Each stored Document should carry its page metadata.
    expected_docstore = InMemoryDocstore(
        {
            docsearch.index_to_docstore_id[pos]: Document(
                page_content=text, metadata={"page": pos}
            )
            for pos, text in enumerate(corpus)
        }
    )
    assert docsearch.docstore.__dict__ == expected_docstore.__dict__
    results = docsearch.similarity_search("foo", k=1)
    assert results == [Document(page_content="foo", metadata={"page": 0})]
|
|
|
|
|
|
|
|
|
2023-06-11 20:20:03 +00:00
|
|
|
def test_faiss_with_metadatas_and_filter() -> None:
    """Test similarity search with a scalar metadata filter."""
    corpus = ["foo", "bar", "baz"]
    metadatas = [{"page": pos} for pos in range(len(corpus))]
    docsearch = FAISS.from_texts(corpus, FakeEmbeddings(), metadatas=metadatas)
    expected_docstore = InMemoryDocstore(
        {
            docsearch.index_to_docstore_id[pos]: Document(
                page_content=text, metadata={"page": pos}
            )
            for pos, text in enumerate(corpus)
        }
    )
    assert docsearch.docstore.__dict__ == expected_docstore.__dict__
    # The filter overrides pure similarity: "foo" is closest, but only
    # documents with page == 1 are eligible.
    results = docsearch.similarity_search("foo", k=1, filter={"page": 1})
    assert results == [Document(page_content="bar", metadata={"page": 1})]
|
2023-06-11 20:20:03 +00:00
|
|
|
|
|
|
|
|
2023-06-21 17:49:01 +00:00
|
|
|
def test_faiss_with_metadatas_and_list_filter() -> None:
    """Test similarity search with a list-valued metadata filter."""
    corpus = ["foo", "bar", "baz", "foo", "qux"]
    # Pages 0..3, with the last entry clamped to page 3 (same as the
    # original `{"page": i} if i <= 3 else {"page": 3}` expression).
    metadatas = [{"page": min(pos, 3)} for pos in range(len(corpus))]
    docsearch = FAISS.from_texts(corpus, FakeEmbeddings(), metadatas=metadatas)
    expected_docstore = InMemoryDocstore(
        {
            docsearch.index_to_docstore_id[pos]: Document(
                page_content=text, metadata={"page": min(pos, 3)}
            )
            for pos, text in enumerate(corpus)
        }
    )
    assert docsearch.docstore.__dict__ == expected_docstore.__dict__
    # A list filter admits any document whose page is in the list.
    results = docsearch.similarity_search("foor", k=1, filter={"page": [0, 1, 2]})
    assert results == [Document(page_content="foo", metadata={"page": 0})]
|
|
|
|
|
|
|
|
|
2022-11-02 04:29:39 +00:00
|
|
|
def test_faiss_search_not_found() -> None:
    """Test what happens when document is not found."""
    docsearch = FAISS.from_texts(["foo", "bar", "baz"], FakeEmbeddings())
    # Get rid of the docstore to purposefully induce errors.
    docsearch.docstore = InMemoryDocstore({})
    with pytest.raises(ValueError):
        docsearch.similarity_search("foo")
|
2022-11-21 00:23:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_faiss_add_texts() -> None:
    """Test end to end adding of texts."""
    # Create initial doc store.
    docsearch = FAISS.from_texts(["foo", "bar", "baz"], FakeEmbeddings())
    # Test adding a similar document as before.
    docsearch.add_texts(["foo"])
    results = docsearch.similarity_search("foo", k=2)
    # Both copies of "foo" should be returned.
    assert results == [Document(page_content="foo")] * 2
|
|
|
|
|
|
|
|
|
|
|
|
def test_faiss_add_texts_not_supported() -> None:
    """Test adding of texts to a docstore that doesn't support it."""
    # Wikipedia is a read-only docstore, so add_texts must fail.
    store = FAISS(FakeEmbeddings().embed_query, None, Wikipedia(), {})
    with pytest.raises(ValueError):
        store.add_texts(["foo"])
|
2023-01-22 00:08:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_faiss_local_save_load() -> None:
    """Test end to end serialization."""
    texts = ["foo", "bar", "baz"]
    docsearch = FAISS.from_texts(texts, FakeEmbeddings())
    # datetime.utcnow() is deprecated (and returns a naive datetime);
    # use an explicit timezone-aware UTC timestamp instead. The strftime
    # output format is unchanged.
    temp_timestamp = datetime.datetime.now(datetime.timezone.utc).strftime(
        "%Y%m%d-%H%M%S"
    )
    with tempfile.TemporaryDirectory(suffix="_" + temp_timestamp + "/") as temp_folder:
        docsearch.save_local(temp_folder)
        new_docsearch = FAISS.load_local(temp_folder, FakeEmbeddings())
    assert new_docsearch.index is not None
|
2023-04-16 04:06:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_faiss_similarity_search_with_relevance_scores() -> None:
    """Test the similarity search with normalized similarities."""
    docsearch = FAISS.from_texts(
        ["foo", "bar", "baz"],
        FakeEmbeddings(),
        # Map raw L2 distance into a [0, 1] relevance score.
        relevance_score_fn=lambda score: 1.0 - score / math.sqrt(2),
    )
    results = docsearch.similarity_search_with_relevance_scores("foo", k=1)
    top_doc, top_score = results[0]
    # Exact match: zero distance maps to a perfect relevance of 1.0.
    assert top_doc == Document(page_content="foo")
    assert top_score == 1.0
|
|
|
|
|
|
|
|
|
|
|
|
def test_faiss_invalid_normalize_fn() -> None:
    """Test the similarity search with normalized similarities."""
    # A relevance function that always returns 2.0 produces scores
    # outside [0, 1], which should trigger a warning.
    docsearch = FAISS.from_texts(
        ["foo", "bar", "baz"], FakeEmbeddings(), relevance_score_fn=lambda _: 2.0
    )
    with pytest.warns(Warning, match="scores must be between"):
        docsearch.similarity_search_with_relevance_scores("foo", k=1)
|