"""Test Deep Lake functionality."""
|
|
import deeplake
|
|
import pytest
|
|
from pytest import FixtureRequest
|
|
|
|
from langchain.docstore.document import Document
|
|
from langchain.vectorstores import DeepLake
|
|
from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings
|
|
|
|
|
|
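
# These are integration tests: they exercise a real Deep Lake dataset
# (in-memory or on local disk) rather than mocks. A typical invocation,
# assuming this repo's layout, would be:
#   pytest tests/integration_tests/vectorstores/test_deeplake.py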


@pytest.fixture
def deeplake_datastore() -> DeepLake:
    texts = ["foo", "bar", "baz"]
    metadatas = [{"page": str(i)} for i in range(len(texts))]
    docsearch = DeepLake.from_texts(
        dataset_path="./test_path",
        texts=texts,
        metadatas=metadatas,
        embedding=FakeEmbeddings(),
        overwrite=True,
    )
    return docsearch
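
# Note: this fixture has no teardown of its own; each test that uses it is
# expected to clean up after itself, either via `delete_dataset()` or (in the
# path-deletion test) via `DeepLake.force_delete_by_path`.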


@pytest.fixture(params=["L1", "L2", "max", "cos"])
def distance_metric(request: FixtureRequest) -> str:
    return request.param
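
# Because `distance_metric` is a parametrized fixture, every test that
# requests it runs once per metric ("L1", "L2", "max", "cos").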


def test_deeplake() -> None:
    """Test end to end construction and search."""
    texts = ["foo", "bar", "baz"]
    docsearch = DeepLake.from_texts(
        dataset_path="mem://test_path", texts=texts, embedding=FakeEmbeddings()
    )
    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]


def test_deeplake_with_metadatas() -> None:
    """Test end to end construction and search."""
    texts = ["foo", "bar", "baz"]
    metadatas = [{"page": str(i)} for i in range(len(texts))]
    docsearch = DeepLake.from_texts(
        dataset_path="mem://test_path",
        texts=texts,
        embedding=FakeEmbeddings(),
        metadatas=metadatas,
    )
    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo", metadata={"page": "0"})]


def test_deeplake_with_persistence() -> None:
    """Test end to end construction and search, with persistence."""
    dataset_path = "./tests/persist_dir"
    if deeplake.exists(dataset_path):
        deeplake.delete(dataset_path)

    texts = ["foo", "bar", "baz"]
    docsearch = DeepLake.from_texts(
        dataset_path=dataset_path,
        texts=texts,
        embedding=FakeEmbeddings(),
    )

    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
    )
    output = docsearch.similarity_search("foo", k=1)
    # The reloaded store should still contain the persisted documents.
    assert output == [Document(page_content="foo")]

    # Clean up
    docsearch.delete_dataset()

    # Persist doesn't need to be called again:
    # data is automatically persisted on object deletion or on program exit.


def test_deeplake_overwrite_flag() -> None:
    """Test overwrite behavior."""
    dataset_path = "./tests/persist_dir"
    if deeplake.exists(dataset_path):
        deeplake.delete(dataset_path)

    texts = ["foo", "bar", "baz"]
    docsearch = DeepLake.from_texts(
        dataset_path=dataset_path,
        texts=texts,
        embedding=FakeEmbeddings(),
    )
    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory, with no overwrite (implicit)
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
    )
    output = docsearch.similarity_search("foo", k=1)
    # assert page still present
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory, with no overwrite (explicit)
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
        overwrite=False,
    )
    output = docsearch.similarity_search("foo", k=1)
    # assert page still present
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory, with overwrite
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
        overwrite=True,
    )
    # Overwriting wipes the existing data, so searching the now-empty
    # dataset is expected to raise.
    with pytest.raises(ValueError):
        output = docsearch.similarity_search("foo", k=1)


def test_similarity_search(deeplake_datastore: DeepLake, distance_metric: str) -> None:
    """Test similarity search."""
    output = deeplake_datastore.similarity_search(
        "foo", k=1, distance_metric=distance_metric
    )
    assert output == [Document(page_content="foo", metadata={"page": "0"})]

    tql_query = (
        "SELECT * WHERE "
        f"id=='{deeplake_datastore.vectorstore.dataset.id[0].numpy()[0]}'"
    )
    # Supplying both a text query and a raw TQL query is ambiguous and
    # is expected to raise.
    with pytest.raises(ValueError):
        output = deeplake_datastore.similarity_search(
            query="foo", tql_query=tql_query, k=1, distance_metric=distance_metric
        )
    deeplake_datastore.delete_dataset()


def test_similarity_search_by_vector(
    deeplake_datastore: DeepLake, distance_metric: str
) -> None:
    """Test similarity search by vector."""
    embeddings = FakeEmbeddings().embed_documents(["foo", "bar", "baz"])
    output = deeplake_datastore.similarity_search_by_vector(
        embeddings[1], k=1, distance_metric=distance_metric
    )
    assert output == [Document(page_content="bar", metadata={"page": "1"})]
    deeplake_datastore.delete_dataset()


def test_similarity_search_with_score(
    deeplake_datastore: DeepLake, distance_metric: str
) -> None:
    """Test similarity search with score."""
    output, score = deeplake_datastore.similarity_search_with_score(
        "foo", k=1, distance_metric=distance_metric
    )[0]
    assert output == Document(page_content="foo", metadata={"page": "0"})
    # An exact match scores 1.0 under cosine similarity and 0.0 under the
    # distance metrics (L1, L2, max).
    if distance_metric == "cos":
        assert score == 1.0
    else:
        assert score == 0.0
    deeplake_datastore.delete_dataset()


def test_similarity_search_with_filter(
    deeplake_datastore: DeepLake, distance_metric: str
) -> None:
    """Test similarity search with a metadata filter."""
    output = deeplake_datastore.similarity_search(
        "foo",
        k=1,
        distance_metric=distance_metric,
        filter={"metadata": {"page": "1"}},
    )
    # The filter restricts the search to page "1", so "bar" is returned
    # even though the query text is "foo".
    assert output == [Document(page_content="bar", metadata={"page": "1"})]
    deeplake_datastore.delete_dataset()


def test_max_marginal_relevance_search(deeplake_datastore: DeepLake) -> None:
    """Test max marginal relevance search, by query and by vector."""
    output = deeplake_datastore.max_marginal_relevance_search("foo", k=1, fetch_k=2)
    assert output == [Document(page_content="foo", metadata={"page": "0"})]

    embeddings = FakeEmbeddings().embed_documents(["foo", "bar", "baz"])
    output = deeplake_datastore.max_marginal_relevance_search_by_vector(
        embeddings[0], k=1, fetch_k=2
    )
    assert output == [Document(page_content="foo", metadata={"page": "0"})]
    deeplake_datastore.delete_dataset()


def test_delete_dataset_by_ids(deeplake_datastore: DeepLake) -> None:
    """Test deleting documents by id."""
    # Fetch the id of the first stored document, then delete it.
    doc_id = deeplake_datastore.vectorstore.dataset.id.data()["value"][0]
    deeplake_datastore.delete(ids=[doc_id])
    assert (
        deeplake_datastore.similarity_search(
            "foo", k=1, filter={"metadata": {"page": "0"}}
        )
        == []
    )
    assert len(deeplake_datastore.vectorstore) == 2

    deeplake_datastore.delete_dataset()


def test_delete_dataset_by_filter(deeplake_datastore: DeepLake) -> None:
    """Test deleting documents by metadata filter."""
    deeplake_datastore.delete(filter={"metadata": {"page": "1"}})
    assert (
        deeplake_datastore.similarity_search(
            "bar", k=1, filter={"metadata": {"page": "1"}}
        )
        == []
    )
    assert len(deeplake_datastore.vectorstore.dataset) == 2

    deeplake_datastore.delete_dataset()


def test_delete_by_path(deeplake_datastore: DeepLake) -> None:
    """Test force-deleting a dataset by path."""
    path = deeplake_datastore.dataset_path
    DeepLake.force_delete_by_path(path)
    assert not deeplake.exists(path)