Inspired by the filtering capability available in ChromaDB, this adds the same functionality to the FAISS vectorstore. Since FAISS does not have an inbuilt method of filtering, we used the approach suggested in this [thread](https://github.com/facebookresearch/faiss/issues/1079).

LangChain issue inspiration: https://github.com/hwchase17/langchain/issues/4572

- [x] Added filtering capability to semantic similarity search and MMR
- [x] Added test cases for filtering in `tests/integration_tests/vectorstores/test_faiss.py`

#### Who can review?

Tag maintainers/contributors who might be interested:

VectorStores / Retrievers / Memory
- @dev2049
- @hwchase17
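For context, the idea from the linked FAISS thread is to over-fetch nearest neighbors and drop non-matching documents afterwards, since FAISS itself only does vector search and has no native metadata filtering. Below is a minimal sketch of that approach; the `search_with_filter` name, the `fetch_k` parameter, and the docstore shapes are illustrative assumptions, not the merged API:

```python
# Minimal sketch of the over-fetch-then-filter idea, NOT the actual
# FAISS vectorstore implementation. `search_with_filter`, `fetch_k`,
# and the docstore shapes below are illustrative assumptions.
from typing import Any, Dict, List, Optional

import numpy as np


def search_with_filter(
    faiss_index: Any,  # a faiss.Index over the document embeddings
    docstore: Dict[str, Any],  # docstore_id -> Document
    index_to_docstore_id: Dict[int, str],
    query_vec: np.ndarray,  # shape (1, d), matching the index dimension
    k: int = 4,
    filter: Optional[Dict[str, Any]] = None,
    fetch_k: int = 20,
) -> List[Any]:
    """Over-fetch nearest neighbors, then drop docs whose metadata doesn't match."""
    # FAISS only returns nearest neighbors, so fetch extra candidates...
    _, indices = faiss_index.search(query_vec, fetch_k if filter is not None else k)
    results = []
    for i in indices[0]:
        if i == -1:  # FAISS pads with -1 when the index holds fewer vectors
            continue
        doc = docstore[index_to_docstore_id[i]]
        # ...and apply the metadata filter after the fact.
        if filter is None or all(
            doc.metadata.get(key) == value for key, value in filter.items()
        ):
            results.append(doc)
    return results[:k]
```

One trade-off of filtering after the search: a restrictive filter can still return fewer than `k` results, which `test_faiss_with_metadatas_and_filter` below exercises with an empty result set.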
169 lines
6.3 KiB
Python
"""Test FAISS functionality."""
|
|
import datetime
|
|
import math
|
|
import tempfile
|
|
|
|
import pytest
|
|
|
|
from langchain.docstore.document import Document
|
|
from langchain.docstore.in_memory import InMemoryDocstore
|
|
from langchain.docstore.wikipedia import Wikipedia
|
|
from langchain.vectorstores.faiss import FAISS
|
|
from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings
|
|
|
|
|
|
def test_faiss() -> None:
|
|
"""Test end to end construction and search."""
|
|
texts = ["foo", "bar", "baz"]
|
|
docsearch = FAISS.from_texts(texts, FakeEmbeddings())
|
|
index_to_id = docsearch.index_to_docstore_id
|
|
expected_docstore = InMemoryDocstore(
|
|
{
|
|
index_to_id[0]: Document(page_content="foo"),
|
|
index_to_id[1]: Document(page_content="bar"),
|
|
index_to_id[2]: Document(page_content="baz"),
|
|
}
|
|
)
|
|
assert docsearch.docstore.__dict__ == expected_docstore.__dict__
|
|
output = docsearch.similarity_search("foo", k=1)
|
|
assert output == [Document(page_content="foo")]
|
|
|
|
|
|
def test_faiss_vector_sim() -> None:
|
|
"""Test vector similarity."""
|
|
texts = ["foo", "bar", "baz"]
|
|
docsearch = FAISS.from_texts(texts, FakeEmbeddings())
|
|
index_to_id = docsearch.index_to_docstore_id
|
|
expected_docstore = InMemoryDocstore(
|
|
{
|
|
index_to_id[0]: Document(page_content="foo"),
|
|
index_to_id[1]: Document(page_content="bar"),
|
|
index_to_id[2]: Document(page_content="baz"),
|
|
}
|
|
)
|
|
assert docsearch.docstore.__dict__ == expected_docstore.__dict__
|
|
query_vec = FakeEmbeddings().embed_query(text="foo")
|
|
output = docsearch.similarity_search_by_vector(query_vec, k=1)
|
|
assert output == [Document(page_content="foo")]
|
|
|
|
# make sure we can have k > docstore size
|
|
output = docsearch.max_marginal_relevance_search_by_vector(query_vec, k=10)
|
|
assert len(output) == len(texts)
|
|
|
|
|
|
def test_faiss_with_metadatas() -> None:
|
|
"""Test end to end construction and search."""
|
|
texts = ["foo", "bar", "baz"]
|
|
metadatas = [{"page": i} for i in range(len(texts))]
|
|
docsearch = FAISS.from_texts(texts, FakeEmbeddings(), metadatas=metadatas)
|
|
expected_docstore = InMemoryDocstore(
|
|
{
|
|
docsearch.index_to_docstore_id[0]: Document(
|
|
page_content="foo", metadata={"page": 0}
|
|
),
|
|
docsearch.index_to_docstore_id[1]: Document(
|
|
page_content="bar", metadata={"page": 1}
|
|
),
|
|
docsearch.index_to_docstore_id[2]: Document(
|
|
page_content="baz", metadata={"page": 2}
|
|
),
|
|
}
|
|
)
|
|
assert docsearch.docstore.__dict__ == expected_docstore.__dict__
|
|
output = docsearch.similarity_search("foo", k=1)
|
|
assert output == [Document(page_content="foo", metadata={"page": 0})]
|
|
|
|
|
|
def test_faiss_with_metadatas_and_filter() -> None:
|
|
texts = ["foo", "bar", "baz"]
|
|
metadatas = [{"page": i} for i in range(len(texts))]
|
|
docsearch = FAISS.from_texts(texts, FakeEmbeddings(), metadatas=metadatas)
|
|
expected_docstore = InMemoryDocstore(
|
|
{
|
|
docsearch.index_to_docstore_id[0]: Document(
|
|
page_content="foo", metadata={"page": 0}
|
|
),
|
|
docsearch.index_to_docstore_id[1]: Document(
|
|
page_content="bar", metadata={"page": 1}
|
|
),
|
|
docsearch.index_to_docstore_id[2]: Document(
|
|
page_content="baz", metadata={"page": 2}
|
|
),
|
|
}
|
|
)
|
|
assert docsearch.docstore.__dict__ == expected_docstore.__dict__
|
|
output = docsearch.similarity_search("foo", k=1, filter={"page": 1})
|
|
assert output == []
|
|
|
|
|
|
def test_faiss_search_not_found() -> None:
|
|
"""Test what happens when document is not found."""
|
|
texts = ["foo", "bar", "baz"]
|
|
docsearch = FAISS.from_texts(texts, FakeEmbeddings())
|
|
# Get rid of the docstore to purposefully induce errors.
|
|
docsearch.docstore = InMemoryDocstore({})
|
|
with pytest.raises(ValueError):
|
|
docsearch.similarity_search("foo")
|
|
|
|
|
|
def test_faiss_add_texts() -> None:
|
|
"""Test end to end adding of texts."""
|
|
# Create initial doc store.
|
|
texts = ["foo", "bar", "baz"]
|
|
docsearch = FAISS.from_texts(texts, FakeEmbeddings())
|
|
# Test adding a similar document as before.
|
|
docsearch.add_texts(["foo"])
|
|
output = docsearch.similarity_search("foo", k=2)
|
|
assert output == [Document(page_content="foo"), Document(page_content="foo")]
|
|
|
|
|
|
def test_faiss_add_texts_not_supported() -> None:
|
|
"""Test adding of texts to a docstore that doesn't support it."""
|
|
docsearch = FAISS(FakeEmbeddings().embed_query, None, Wikipedia(), {})
|
|
with pytest.raises(ValueError):
|
|
docsearch.add_texts(["foo"])
|
|
|
|
|
|
def test_faiss_local_save_load() -> None:
|
|
"""Test end to end serialization."""
|
|
texts = ["foo", "bar", "baz"]
|
|
docsearch = FAISS.from_texts(texts, FakeEmbeddings())
|
|
temp_timestamp = datetime.datetime.utcnow().strftime("%Y%m%d-%H%M%S")
|
|
with tempfile.TemporaryDirectory(suffix="_" + temp_timestamp + "/") as temp_folder:
|
|
docsearch.save_local(temp_folder)
|
|
new_docsearch = FAISS.load_local(temp_folder, FakeEmbeddings())
|
|
assert new_docsearch.index is not None
|
|
|
|
|
|
def test_faiss_similarity_search_with_relevance_scores() -> None:
|
|
"""Test the similarity search with normalized similarities."""
|
|
texts = ["foo", "bar", "baz"]
|
|
docsearch = FAISS.from_texts(
|
|
texts,
|
|
FakeEmbeddings(),
|
|
relevance_score_fn=lambda score: 1.0 - score / math.sqrt(2),
|
|
)
|
|
outputs = docsearch.similarity_search_with_relevance_scores("foo", k=1)
|
|
output, score = outputs[0]
|
|
assert output == Document(page_content="foo")
|
|
assert score == 1.0
|
|
|
|
|
|
def test_faiss_invalid_normalize_fn() -> None:
|
|
"""Test the similarity search with normalized similarities."""
|
|
texts = ["foo", "bar", "baz"]
|
|
docsearch = FAISS.from_texts(
|
|
texts, FakeEmbeddings(), relevance_score_fn=lambda _: 2.0
|
|
)
|
|
with pytest.warns(Warning, match="scores must be between"):
|
|
docsearch.similarity_search_with_relevance_scores("foo", k=1)
|
|
|
|
|
|
def test_missing_normalize_score_fn() -> None:
|
|
"""Test doesn't perform similarity search without a normalize score function."""
|
|
with pytest.raises(ValueError):
|
|
texts = ["foo", "bar", "baz"]
|
|
faiss_instance = FAISS.from_texts(texts, FakeEmbeddings())
|
|
faiss_instance.relevance_score_fn = None
|
|
faiss_instance.similarity_search_with_relevance_scores("foo", k=2)
|