"""Fake Embedding class for testing purposes."""
import math
from typing import List

from langchain.embeddings.base import Embeddings

fake_texts = ["foo", "bar", "baz"]


class FakeEmbeddings(Embeddings):
    """Fake embeddings functionality for testing."""

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Return simple embeddings.
        Embeddings encode each text as its index."""
        return [[float(1.0)] * 9 + [float(i)] for i in range(len(texts))]

    def embed_query(self, text: str) -> List[float]:
        """Return constant query embeddings.
        Embeddings are identical to embed_documents(texts)[0].
        Distance to each text will be that text's index,
        as it was passed to embed_documents."""
        return [float(1.0)] * 9 + [float(0.0)]
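

# A minimal usage sketch, not part of the original module: it illustrates the vector
# shape FakeEmbeddings produces (nine constant 1.0 entries plus the document index).
# The helper name below is hypothetical and exists only for illustration.
def _demo_fake_embeddings() -> None:
    emb = FakeEmbeddings()
    docs = emb.embed_documents(["foo", "bar"])
    assert docs[0] == [1.0] * 9 + [0.0]  # first document -> trailing 0.0
    assert docs[1] == [1.0] * 9 + [1.0]  # second document -> trailing 1.0
    assert emb.embed_query("anything") == [1.0] * 9 + [0.0]  # constant query vector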


class ConsistentFakeEmbeddings(FakeEmbeddings):
    """Fake embeddings which remember all the texts seen so far to return consistent
    vectors for the same texts."""

    def __init__(self, dimensionality: int = 10) -> None:
        self.known_texts: List[str] = []
        self.dimensionality = dimensionality

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Return consistent embeddings for each text seen so far."""
        out_vectors = []
        for text in texts:
            if text not in self.known_texts:
                self.known_texts.append(text)
            vector = [float(1.0)] * (self.dimensionality - 1) + [
                float(self.known_texts.index(text))
            ]
            out_vectors.append(vector)
        return out_vectors

    def embed_query(self, text: str) -> List[float]:
        """Return consistent embeddings for the text, if seen before, or a constant
        one if the text is unknown."""
        if text not in self.known_texts:
            return [float(1.0)] * (self.dimensionality - 1) + [float(0.0)]
        return [float(1.0)] * (self.dimensionality - 1) + [
            float(self.known_texts.index(text))
        ]
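

# A minimal usage sketch, not part of the original module: it shows that
# ConsistentFakeEmbeddings keeps returning the same vector for a text once seen,
# keyed by first-seen order, while unknown query texts get a constant trailing 0.0.
# The helper name below is hypothetical and exists only for illustration.
def _demo_consistent_fake_embeddings() -> None:
    emb = ConsistentFakeEmbeddings(dimensionality=4)
    first = emb.embed_documents(["foo", "bar"])
    second = emb.embed_documents(["bar", "foo"])
    assert first[0] == second[1] == [1.0, 1.0, 1.0, 0.0]  # "foo" keeps index 0
    assert first[1] == second[0] == [1.0, 1.0, 1.0, 1.0]  # "bar" keeps index 1
    assert emb.embed_query("foo") == [1.0, 1.0, 1.0, 0.0]  # known text -> its index
    assert emb.embed_query("baz") == [1.0, 1.0, 1.0, 0.0]  # unknown text -> constant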


class AngularTwoDimensionalEmbeddings(Embeddings):
    """
    From angles (as strings in units of pi) to unit embedding vectors on a circle.
    """

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """
        Make a list of texts into a list of embedding vectors.
        """
        return [self.embed_query(text) for text in texts]

    def embed_query(self, text: str) -> List[float]:
        """
        Convert input text to a 'vector' (list of floats).
        If the text is a number, use it as the angle for the
        unit vector in units of pi.
        Any other input text becomes the singular result [0, 0]!
        """
        try:
            angle = float(text)
            return [math.cos(angle * math.pi), math.sin(angle * math.pi)]
        except ValueError:
            # Assume: it is just a test string; no attention is paid to its value.
            return [0.0, 0.0]
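

# A minimal usage sketch, not part of the original module: angles given in units of pi
# land on the unit circle, which gives exact control over pairwise cosine similarity
# (useful, for instance, when exercising MMR-style searches in tests). The helper
# name below is hypothetical and exists only for illustration.
def _demo_angular_embeddings() -> None:
    emb = AngularTwoDimensionalEmbeddings()
    assert emb.embed_query("0.0") == [1.0, 0.0]  # angle 0 -> (1, 0)
    assert math.isclose(emb.embed_query("0.5")[1], 1.0)  # angle pi/2 -> (0, 1)
    assert math.isclose(emb.embed_query("1.0")[0], -1.0)  # angle pi -> (-1, 0)
    assert emb.embed_query("not a number") == [0.0, 0.0]  # non-numeric fallback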