"""Test Redis functionality."""

import pytest

from langchain.docstore.document import Document
from langchain.vectorstores.redis import Redis
from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings
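
# NOTE: These integration tests assume a Redis instance with the search module
# (e.g. redis-stack / RediSearch) is reachable at TEST_REDIS_URL; index
# creation, similarity search, and drop_index below will fail without it.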

TEST_INDEX_NAME = "test"
TEST_REDIS_URL = "redis://localhost:6379"
TEST_SINGLE_RESULT = [Document(page_content="foo")]
TEST_RESULT = [Document(page_content="foo"), Document(page_content="foo")]
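
# Expected scores for the second-nearest hit when querying "far" against the
# deterministic FakeEmbeddings vectors; COSINE_SCORE uses pytest.approx,
# presumably to absorb small floating-point differences between Redis builds.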
COSINE_SCORE = pytest.approx(0.05, abs=0.002)
IP_SCORE = -8.0
EUCLIDEAN_SCORE = 1.0


def drop(index_name: str) -> bool:
    """Drop the given index and delete its documents."""
    return Redis.drop_index(
        index_name=index_name, delete_documents=True, redis_url=TEST_REDIS_URL
    )


def test_redis() -> None:
    """Test end to end construction and search."""
    texts = ["foo", "bar", "baz"]
    docsearch = Redis.from_texts(texts, FakeEmbeddings(), redis_url=TEST_REDIS_URL)
    output = docsearch.similarity_search("foo", k=1)
    assert output == TEST_SINGLE_RESULT
    assert drop(docsearch.index_name)


def test_redis_new_vector() -> None:
    """Test adding a new document"""
    texts = ["foo", "bar", "baz"]
    docsearch = Redis.from_texts(texts, FakeEmbeddings(), redis_url=TEST_REDIS_URL)
    docsearch.add_texts(["foo"])
    output = docsearch.similarity_search("foo", k=2)
    assert output == TEST_RESULT
    assert drop(docsearch.index_name)


def test_redis_from_existing() -> None:
    """Test creating a vector store from an existing index."""
    texts = ["foo", "bar", "baz"]
    Redis.from_texts(
        texts, FakeEmbeddings(), index_name=TEST_INDEX_NAME, redis_url=TEST_REDIS_URL
    )
    # Test creating from an existing index
    docsearch2 = Redis.from_existing_index(
        FakeEmbeddings(), index_name=TEST_INDEX_NAME, redis_url=TEST_REDIS_URL
    )
    output = docsearch2.similarity_search("foo", k=1)
    assert output == TEST_SINGLE_RESULT
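

# test_redis_from_existing leaves TEST_INDEX_NAME in place (relying on pytest's
# default in-file test ordering) so the next test can attach to the same index;
# the index is finally dropped at the end of test_redis_add_texts_to_existing.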


def test_redis_add_texts_to_existing() -> None:
    """Test adding texts to an existing index."""
    # Test creating from an existing index
    docsearch = Redis.from_existing_index(
        FakeEmbeddings(), index_name=TEST_INDEX_NAME, redis_url=TEST_REDIS_URL
    )
    docsearch.add_texts(["foo"])
    output = docsearch.similarity_search("foo", k=2)
    assert output == TEST_RESULT
    assert drop(TEST_INDEX_NAME)
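

# The three tests below build an index with a different distance_metric
# ("COSINE", "L2", "IP") and check the score returned for the second-nearest
# hit of the query "far" against the expected constants defined above.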


def test_cosine() -> None:
    """Test cosine distance."""
    texts = ["foo", "bar", "baz"]
    docsearch = Redis.from_texts(
        texts,
        FakeEmbeddings(),
        redis_url=TEST_REDIS_URL,
        distance_metric="COSINE",
    )
    output = docsearch.similarity_search_with_score("far", k=2)
    _, score = output[1]
    assert score == COSINE_SCORE
    assert drop(docsearch.index_name)


def test_l2() -> None:
    """Test Flat L2 distance."""
    texts = ["foo", "bar", "baz"]
    docsearch = Redis.from_texts(
        texts, FakeEmbeddings(), redis_url=TEST_REDIS_URL, distance_metric="L2"
    )
    output = docsearch.similarity_search_with_score("far", k=2)
    _, score = output[1]
    assert score == EUCLIDEAN_SCORE
    assert drop(docsearch.index_name)


def test_ip() -> None:
    """Test inner product distance."""
    texts = ["foo", "bar", "baz"]
    docsearch = Redis.from_texts(
        texts, FakeEmbeddings(), redis_url=TEST_REDIS_URL, distance_metric="IP"
    )
    output = docsearch.similarity_search_with_score("far", k=2)
    _, score = output[1]
    assert score == IP_SCORE
    assert drop(docsearch.index_name)