2023-02-27 06:35:04 +00:00
|
|
|
"""Test Deep Lake functionality."""
|
2023-04-06 19:47:33 +00:00
|
|
|
import deeplake
|
|
|
|
import pytest
|
|
|
|
from pytest import FixtureRequest
|
|
|
|
|
2023-02-27 06:35:04 +00:00
|
|
|
from langchain.docstore.document import Document
|
|
|
|
from langchain.vectorstores import DeepLake
|
|
|
|
from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings
|
|
|
|
|
|
|
|
|
2023-04-06 19:47:33 +00:00
|
|
|
@pytest.fixture
def deeplake_datastore() -> DeepLake:
    """Build a DeepLake store at ./test_path seeded with three documents.

    Each text carries a ``page`` metadata key ("0", "1", "2");
    ``overwrite=True`` guarantees a clean dataset per test run.
    """
    sample_texts = ["foo", "bar", "baz"]
    sample_metadatas = [{"page": str(index)} for index in range(len(sample_texts))]
    store = DeepLake.from_texts(
        dataset_path="./test_path",
        texts=sample_texts,
        metadatas=sample_metadatas,
        embedding=FakeEmbeddings(),
        overwrite=True,
    )
    return store
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(params=["L1", "L2", "max", "cos"])
def distance_metric(request: FixtureRequest) -> str:
    """Parametrize tests over every supported DeepLake distance metric."""
    return request.param
|
|
|
|
|
|
|
|
|
2023-02-27 06:35:04 +00:00
|
|
|
def test_deeplake() -> None:
    """Test end to end construction and search."""
    store = DeepLake.from_texts(
        dataset_path="mem://test_path",
        texts=["foo", "bar", "baz"],
        embedding=FakeEmbeddings(),
    )
    # The query string matches one stored text exactly, so it must come back.
    results = store.similarity_search("foo", k=1)
    assert results == [Document(page_content="foo")]
|
|
|
|
|
|
|
|
|
|
|
|
def test_deeplake_with_metadatas() -> None:
    """Test end to end construction and search."""
    corpus = ["foo", "bar", "baz"]
    pages = [{"page": str(position)} for position in range(len(corpus))]
    store = DeepLake.from_texts(
        dataset_path="mem://test_path",
        texts=corpus,
        embedding=FakeEmbeddings(),
        metadatas=pages,
    )
    # The top hit must carry the metadata attached at ingestion time.
    hits = store.similarity_search("foo", k=1)
    assert hits == [Document(page_content="foo", metadata={"page": "0"})]
|
|
|
|
|
|
|
|
|
|
|
|
def test_deeplakewith_persistence() -> None:
    """Test end to end construction and search, with persistence."""
    dataset_path = "./tests/persist_dir"
    # Start from a clean slate so a previous run cannot leak state.
    if deeplake.exists(dataset_path):
        deeplake.delete(dataset_path)

    texts = ["foo", "bar", "baz"]
    docsearch = DeepLake.from_texts(
        dataset_path=dataset_path,
        texts=texts,
        embedding=FakeEmbeddings(),
    )

    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
    )
    output = docsearch.similarity_search("foo", k=1)
    # Fix: the original computed this result but never asserted on it, so the
    # persistence round-trip was not actually verified.
    assert output == [Document(page_content="foo")]

    # Clean up
    docsearch.delete_dataset()

    # Persist doesn't need to be called again
    # Data will be automatically persisted on object deletion
    # Or on program exit
|
2023-04-06 19:47:33 +00:00
|
|
|
|
|
|
|
|
2023-05-16 00:39:16 +00:00
|
|
|
def test_deeplake_overwrite_flag() -> None:
    """Test overwrite behavior.

    ``overwrite=False`` (implicit or explicit) must preserve an existing
    dataset; ``overwrite=True`` must wipe it.
    """
    dataset_path = "./tests/persist_dir"
    # Start from a clean slate so a previous run cannot leak state.
    if deeplake.exists(dataset_path):
        deeplake.delete(dataset_path)

    texts = ["foo", "bar", "baz"]
    docsearch = DeepLake.from_texts(
        dataset_path=dataset_path,
        texts=texts,
        embedding=FakeEmbeddings(),
    )
    output = docsearch.similarity_search("foo", k=1)
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory, with no overwrite (implicit)
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
    )
    output = docsearch.similarity_search("foo", k=1)
    # assert page still present
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory, with no overwrite (explicit)
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
        overwrite=False,
    )
    output = docsearch.similarity_search("foo", k=1)
    # assert page still present
    assert output == [Document(page_content="foo")]

    # Get a new VectorStore from the persisted directory, with overwrite
    docsearch = DeepLake(
        dataset_path=dataset_path,
        embedding_function=FakeEmbeddings(),
        overwrite=True,
    )
    # Searching the overwritten (now empty) dataset raises ValueError.
    # Fix: dropped the unused ``output =`` binding inside the raises block (F841).
    with pytest.raises(ValueError):
        docsearch.similarity_search("foo", k=1)
|
2023-05-16 00:39:16 +00:00
|
|
|
|
|
|
|
|
2023-04-06 19:47:33 +00:00
|
|
|
def test_similarity_search(deeplake_datastore: DeepLake, distance_metric: str) -> None:
    """Test similarity search."""
    output = deeplake_datastore.similarity_search(
        "foo", k=1, distance_metric=distance_metric
    )
    assert output == [Document(page_content="foo", metadata={"page": "0"})]

    # Fix: removed the f-prefix from the placeholder-free first segment (F541).
    tql_query = (
        "SELECT * WHERE "
        f"id=='{deeplake_datastore.vectorstore.dataset.id[0].numpy()[0]}'"
    )
    # Supplying both a text query and a TQL query is ambiguous and must raise.
    # Fix: dropped the unused ``output =`` binding inside the raises block (F841).
    with pytest.raises(ValueError):
        deeplake_datastore.similarity_search(
            query="foo", tql_query=tql_query, k=1, distance_metric=distance_metric
        )
    deeplake_datastore.delete_dataset()
|
|
|
|
|
|
|
|
|
|
|
|
def test_similarity_search_by_vector(
    deeplake_datastore: DeepLake, distance_metric: str
) -> None:
    """Test similarity search by vector."""
    query_vectors = FakeEmbeddings().embed_documents(["foo", "bar", "baz"])
    # Querying with "bar"'s own embedding must return "bar" itself.
    results = deeplake_datastore.similarity_search_by_vector(
        query_vectors[1], k=1, distance_metric=distance_metric
    )
    assert results == [Document(page_content="bar", metadata={"page": "1"})]
    deeplake_datastore.delete_dataset()
|
|
|
|
|
|
|
|
|
|
|
|
def test_similarity_search_with_score(
    deeplake_datastore: DeepLake, distance_metric: str
) -> None:
    """Test similarity search with score."""
    document, score = deeplake_datastore.similarity_search_with_score(
        "foo", k=1, distance_metric=distance_metric
    )[0]
    assert document == Document(page_content="foo", metadata={"page": "0"})
    # An exact match scores 1.0 under cosine similarity and 0.0 under the
    # distance-style metrics ("L1", "L2", "max").
    expected_score = 1.0 if distance_metric == "cos" else 0.0
    assert score == expected_score
    deeplake_datastore.delete_dataset()
|
|
|
|
|
|
|
|
|
|
|
|
def test_similarity_search_with_filter(
    deeplake_datastore: DeepLake, distance_metric: str
) -> None:
    """Test similarity search."""
    # The metadata filter restricts candidates to page "1", so the best hit
    # is "bar" even though the query text is "foo".
    matches = deeplake_datastore.similarity_search(
        "foo",
        k=1,
        distance_metric=distance_metric,
        filter={"metadata": {"page": "1"}},
    )
    assert matches == [Document(page_content="bar", metadata={"page": "1"})]
    deeplake_datastore.delete_dataset()
|
|
|
|
|
|
|
|
|
|
|
|
def test_max_marginal_relevance_search(deeplake_datastore: DeepLake) -> None:
    """Test max marginal relevance search by vector."""
    expected = [Document(page_content="foo", metadata={"page": "0"})]

    # Text-query variant.
    assert (
        deeplake_datastore.max_marginal_relevance_search("foo", k=1, fetch_k=2)
        == expected
    )

    # Vector-query variant: querying with "foo"'s embedding gives the same hit.
    vectors = FakeEmbeddings().embed_documents(["foo", "bar", "baz"])
    assert (
        deeplake_datastore.max_marginal_relevance_search_by_vector(
            vectors[0], k=1, fetch_k=2
        )
        == expected
    )
    deeplake_datastore.delete_dataset()
|
|
|
|
|
|
|
|
|
|
|
|
def test_delete_dataset_by_ids(deeplake_datastore: DeepLake) -> None:
    """Test delete dataset."""
    # Fix: renamed local ``id`` -> ``doc_id`` so the ``id`` builtin is not shadowed.
    doc_id = deeplake_datastore.vectorstore.dataset.id.data()["value"][0]
    deeplake_datastore.delete(ids=[doc_id])
    # The first document ("foo", page "0") is gone...
    assert (
        deeplake_datastore.similarity_search(
            "foo", k=1, filter={"metadata": {"page": "0"}}
        )
        == []
    )
    # ...and the other two documents remain.
    assert len(deeplake_datastore.vectorstore) == 2

    deeplake_datastore.delete_dataset()
|
|
|
|
|
|
|
|
|
|
|
|
def test_delete_dataset_by_filter(deeplake_datastore: DeepLake) -> None:
    """Test delete dataset."""
    # Remove the single document whose metadata page is "1" ("bar").
    deeplake_datastore.delete(filter={"metadata": {"page": "1"}})
    remaining = deeplake_datastore.similarity_search(
        "bar", k=1, filter={"metadata": {"page": "1"}}
    )
    assert remaining == []
    # Two of the three seeded documents survive the filtered delete.
    assert len(deeplake_datastore.vectorstore.dataset) == 2

    deeplake_datastore.delete_dataset()
|
2023-04-24 04:23:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_delete_by_path(deeplake_datastore: DeepLake) -> None:
    """Test delete dataset."""
    target_path = deeplake_datastore.dataset_path
    # Force-delete by path, then confirm nothing remains on disk.
    DeepLake.force_delete_by_path(target_path)
    assert not deeplake.exists(target_path)
|