"""Test Chroma functionality."""
import pytest

from langchain.docstore.document import Document
from langchain.vectorstores import Chroma
from tests.integration_tests.vectorstores.fake_embeddings import (
    ConsistentFakeEmbeddings,
    FakeEmbeddings,
)
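# NOTE: These integration tests assume a local `chromadb` installation; the
# Chroma client is expected to run in-process rather than against a server.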


def test_chroma() -> None:
    """Test end to end construction and search."""
    texts = ["foo", "bar", "baz"]
    docsearch = Chroma.from_texts(
        collection_name="test_collection", texts=texts, embedding=FakeEmbeddings()
    )
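    # With FakeEmbeddings, the query vector lines up with the first document,
    # so "foo" is expected to be the top hit.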
    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]


@pytest.mark.asyncio
async def test_chroma_async() -> None:
"""Test end to end construction and search."""
|
|
|
|
    texts = ["foo", "bar", "baz"]
    docsearch = Chroma.from_texts(
        collection_name="test_collection", texts=texts, embedding=FakeEmbeddings()
    )
    output = await docsearch.asimilarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]


def test_chroma_with_metadatas() -> None:
    """Test end to end construction and search with metadata."""
    texts = ["foo", "bar", "baz"]
    metadatas = [{"page": str(i)} for i in range(len(texts))]
    docsearch = Chroma.from_texts(
        collection_name="test_collection",
        texts=texts,
        embedding=FakeEmbeddings(),
        metadatas=metadatas,
    )
    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo", metadata={"page": "0"})]


def test_chroma_with_metadatas_with_scores() -> None:
    """Test end to end construction and scored search."""
    texts = ["foo", "bar", "baz"]
    metadatas = [{"page": str(i)} for i in range(len(texts))]
    docsearch = Chroma.from_texts(
        collection_name="test_collection",
        texts=texts,
        embedding=FakeEmbeddings(),
        metadatas=metadatas,
    )
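    # Scores returned by similarity_search_with_score are distances, so an
    # exact match is expected to come back with a score of 0.0.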
    output = docsearch.similarity_search_with_score("foo", k=1)
    assert output == [(Document(page_content="foo", metadata={"page": "0"}), 0.0)]


def test_chroma_with_metadatas_with_scores_using_vector() -> None:
    """Test end to end construction and scored search, using embedding vector."""
    texts = ["foo", "bar", "baz"]
    metadatas = [{"page": str(i)} for i in range(len(texts))]
    embeddings = FakeEmbeddings()

    docsearch = Chroma.from_texts(
        collection_name="test_collection",
        texts=texts,
        embedding=embeddings,
        metadatas=metadatas,
    )
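    # Embed the query manually so the search is driven by a raw vector rather
    # than by query text.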
    embedded_query = embeddings.embed_query("foo")
    output = docsearch.similarity_search_by_vector_with_relevance_scores(
        embedding=embedded_query, k=1
    )
    assert output == [(Document(page_content="foo", metadata={"page": "0"}), 0.0)]


def test_chroma_search_filter() -> None:
    """Test end to end construction and search with metadata filtering."""
    texts = ["far", "bar", "baz"]
    metadatas = [{"first_letter": "{}".format(text[0])} for text in texts]
    docsearch = Chroma.from_texts(
        collection_name="test_collection",
        texts=texts,
        embedding=FakeEmbeddings(),
        metadatas=metadatas,
    )
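    # The filter argument restricts results to documents whose metadata matches.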
    output = docsearch.similarity_search("far", k=1, filter={"first_letter": "f"})
    assert output == [Document(page_content="far", metadata={"first_letter": "f"})]
    output = docsearch.similarity_search("far", k=1, filter={"first_letter": "b"})
    assert output == [Document(page_content="bar", metadata={"first_letter": "b"})]


def test_chroma_search_filter_with_scores() -> None:
    """Test end to end construction and scored search with metadata filtering."""
    texts = ["far", "bar", "baz"]
    metadatas = [{"first_letter": "{}".format(text[0])} for text in texts]
    docsearch = Chroma.from_texts(
        collection_name="test_collection",
        texts=texts,
        embedding=FakeEmbeddings(),
        metadatas=metadatas,
    )
    output = docsearch.similarity_search_with_score(
        "far", k=1, filter={"first_letter": "f"}
    )
    assert output == [
        (Document(page_content="far", metadata={"first_letter": "f"}), 0.0)
    ]
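    # With a filter that excludes the exact match, the best remaining document
    # is expected back with a non-zero distance.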
    output = docsearch.similarity_search_with_score(
        "far", k=1, filter={"first_letter": "b"}
    )
    assert output == [
        (Document(page_content="bar", metadata={"first_letter": "b"}), 1.0)
    ]


def test_chroma_with_persistence() -> None:
    """Test end to end construction and search, with persistence."""
    chroma_persist_dir = "./tests/persist_dir"
    collection_name = "test_collection"
    texts = ["foo", "bar", "baz"]
    docsearch = Chroma.from_texts(
        collection_name=collection_name,
        texts=texts,
        embedding=FakeEmbeddings(),
        persist_directory=chroma_persist_dir,
    )

    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]
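
    # Write the collection to disk so it can be reloaded below.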
    docsearch.persist()

    # Get a new VectorStore from the persisted directory
    docsearch = Chroma(
        collection_name=collection_name,
        embedding_function=FakeEmbeddings(),
        persist_directory=chroma_persist_dir,
    )
    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]

    # Clean up
    docsearch.delete_collection()

    # Persist doesn't need to be called again
    # Data will be automatically persisted on object deletion
    # Or on program exit


def test_chroma_mmr() -> None:
    """Test end to end construction and maximal marginal relevance (MMR) search."""
    texts = ["foo", "bar", "baz"]
    docsearch = Chroma.from_texts(
        collection_name="test_collection", texts=texts, embedding=FakeEmbeddings()
    )
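    # Maximal marginal relevance re-ranks results to balance query relevance
    # against diversity among the returned documents.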
    output = docsearch.max_marginal_relevance_search("foo", k=1)
    assert output == [Document(page_content="foo")]


def test_chroma_mmr_by_vector() -> None:
"""Test end to end construction and search."""
|
|
|
|
    texts = ["foo", "bar", "baz"]
    embeddings = FakeEmbeddings()
    docsearch = Chroma.from_texts(
        collection_name="test_collection", texts=texts, embedding=embeddings
    )
    embedded_query = embeddings.embed_query("foo")
    output = docsearch.max_marginal_relevance_search_by_vector(embedded_query, k=1)
    assert output == [Document(page_content="foo")]


def test_chroma_with_include_parameter() -> None:
    """Test end to end construction and include parameter."""
    texts = ["foo", "bar", "baz"]
    docsearch = Chroma.from_texts(
        collection_name="test_collection", texts=texts, embedding=FakeEmbeddings()
    )
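    # Embeddings are only returned when explicitly requested via `include`;
    # by default the "embeddings" field comes back as None.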
    output = docsearch.get(include=["embeddings"])
    assert output["embeddings"] is not None
    output = docsearch.get()
    assert output["embeddings"] is None


def test_chroma_update_document() -> None:
    """Test the update_document function in the Chroma class."""
    # Make a consistent embedding
    embedding = ConsistentFakeEmbeddings()

    # Initial document content and id
    initial_content = "foo"
    document_id = "doc1"

    # Create an instance of Document with initial content and metadata
    original_doc = Document(page_content=initial_content, metadata={"page": "0"})

    # Initialize a Chroma instance with the original document
    docsearch = Chroma.from_documents(
        collection_name="test_collection",
        documents=[original_doc],
        embedding=embedding,
        ids=[document_id],
    )
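    # Record the embedding stored for the original document so it can be
    # compared against the re-computed embedding after the update.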
    old_embedding = docsearch._collection.peek()["embeddings"][
        docsearch._collection.peek()["ids"].index(document_id)
    ]

    # Define updated content for the document
    updated_content = "updated foo"

    # Create a new Document instance with the updated content and the same id
    updated_doc = Document(page_content=updated_content, metadata={"page": "0"})

    # Update the document in the Chroma instance
    docsearch.update_document(document_id=document_id, document=updated_doc)

    # Perform a similarity search with the updated content
    output = docsearch.similarity_search(updated_content, k=1)

    # Assert that the updated document is returned by the search
    assert output == [Document(page_content=updated_content, metadata={"page": "0"})]

    # Assert that the new embedding is correct
    new_embedding = docsearch._collection.peek()["embeddings"][
        docsearch._collection.peek()["ids"].index(document_id)
    ]
    assert new_embedding == embedding.embed_documents([updated_content])[0]
    assert new_embedding != old_embedding