Mirror of https://github.com/hwchase17/langchain, synced 2024-11-02 09:40:22 +00:00.
Commit ed58eeb9c5:
Moved the following modules to the new package langchain-community in a backwards-compatible fashion:

```
mv langchain/langchain/adapters community/langchain_community
mv langchain/langchain/callbacks community/langchain_community/callbacks
mv langchain/langchain/chat_loaders community/langchain_community
mv langchain/langchain/chat_models community/langchain_community
mv langchain/langchain/document_loaders community/langchain_community
mv langchain/langchain/docstore community/langchain_community
mv langchain/langchain/document_transformers community/langchain_community
mv langchain/langchain/embeddings community/langchain_community
mv langchain/langchain/graphs community/langchain_community
mv langchain/langchain/llms community/langchain_community
mv langchain/langchain/memory/chat_message_histories community/langchain_community
mv langchain/langchain/retrievers community/langchain_community
mv langchain/langchain/storage community/langchain_community
mv langchain/langchain/tools community/langchain_community
mv langchain/langchain/utilities community/langchain_community
mv langchain/langchain/vectorstores community/langchain_community
mv langchain/langchain/agents/agent_toolkits community/langchain_community
mv langchain/langchain/cache.py community/langchain_community
```

Moved the following to core:

```
mv langchain/langchain/utils/json_schema.py core/langchain_core/utils
mv langchain/langchain/utils/html.py core/langchain_core/utils
mv langchain/langchain/utils/strings.py core/langchain_core/utils
cat langchain/langchain/utils/env.py >> core/langchain_core/utils/env.py
rm langchain/langchain/utils/env.py
```

See .scripts/community_split/script_integrations.sh for all changes.
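The message describes the move as backwards compatible: implementations now live under `langchain_community`, while the old `langchain` import paths are meant to keep resolving. A minimal sketch of the two import paths for the vector store defined in this file (the proxy/deprecation behavior of the old path is an assumption, not something stated in this commit):

```
# Canonical import path after the split (langchain-community package):
from langchain_community.vectorstores import Tigris

# Pre-split import path; per the commit message this remains usable for
# backwards compatibility (assumed to re-export from langchain_community):
# from langchain.vectorstores import Tigris
```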
149 lines, 4.8 KiB, Python
from __future__ import annotations

import itertools
from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Tuple

from langchain_core.documents import Document
from langchain_core.embeddings import Embeddings
from langchain_core.vectorstores import VectorStore

if TYPE_CHECKING:
    from tigrisdb import TigrisClient
    from tigrisdb import VectorStore as TigrisVectorStore
    from tigrisdb.types.filters import Filter as TigrisFilter
    from tigrisdb.types.vector import Document as TigrisDocument


class Tigris(VectorStore):
    """`Tigris` vector store."""

    def __init__(self, client: TigrisClient, embeddings: Embeddings, index_name: str):
        """Initialize Tigris vector store."""
        try:
            import tigrisdb  # noqa: F401
        except ImportError:
            raise ImportError(
                "Could not import tigrisdb python package. "
                "Please install it with `pip install tigrisdb`"
            )

        self._embed_fn = embeddings
        self._vector_store = TigrisVectorStore(client.get_search(), index_name)

    @property
    def embeddings(self) -> Embeddings:
        return self._embed_fn

    @property
    def search_index(self) -> TigrisVectorStore:
        return self._vector_store

    def add_texts(
        self,
        texts: Iterable[str],
        metadatas: Optional[List[dict]] = None,
        ids: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> List[str]:
        """Run more texts through the embeddings and add to the vectorstore.

        Args:
            texts: Iterable of strings to add to the vectorstore.
            metadatas: Optional list of metadatas associated with the texts.
            ids: Optional list of ids for documents.
                Ids will be autogenerated if not provided.
            kwargs: vectorstore specific parameters

        Returns:
            List of ids from adding the texts into the vectorstore.
        """
        docs = self._prep_docs(texts, metadatas, ids)
        result = self.search_index.add_documents(docs)
        return [r.id for r in result]

    def similarity_search(
        self,
        query: str,
        k: int = 4,
        filter: Optional[TigrisFilter] = None,
        **kwargs: Any,
    ) -> List[Document]:
        """Return docs most similar to query."""
        docs_with_scores = self.similarity_search_with_score(query, k, filter)
        return [doc for doc, _ in docs_with_scores]

    def similarity_search_with_score(
        self,
        query: str,
        k: int = 4,
        filter: Optional[TigrisFilter] = None,
    ) -> List[Tuple[Document, float]]:
        """Run similarity search with Tigris with distance.

        Args:
            query (str): Query text to search for.
            k (int): Number of results to return. Defaults to 4.
            filter (Optional[TigrisFilter]): Filter by metadata. Defaults to None.

        Returns:
            List[Tuple[Document, float]]: List of documents most similar to the query
                text with distance in float.
        """
        vector = self._embed_fn.embed_query(query)
        result = self.search_index.similarity_search(
            vector=vector, k=k, filter_by=filter
        )
        docs: List[Tuple[Document, float]] = []
        for r in result:
            docs.append(
                (
                    Document(
                        page_content=r.doc["text"], metadata=r.doc.get("metadata")
                    ),
                    r.score,
                )
            )
        return docs

    @classmethod
    def from_texts(
        cls,
        texts: List[str],
        embedding: Embeddings,
        metadatas: Optional[List[dict]] = None,
        ids: Optional[List[str]] = None,
        client: Optional[TigrisClient] = None,
        index_name: Optional[str] = None,
        **kwargs: Any,
    ) -> Tigris:
        """Return VectorStore initialized from texts and embeddings."""
        if not index_name:
            raise ValueError("`index_name` is required")

        if not client:
            client = TigrisClient()
        store = cls(client, embedding, index_name)
        store.add_texts(texts=texts, metadatas=metadatas, ids=ids)
        return store

    def _prep_docs(
        self,
        texts: Iterable[str],
        metadatas: Optional[List[dict]],
        ids: Optional[List[str]],
    ) -> List[TigrisDocument]:
        """Embed the texts and assemble them into TigrisDocument dicts."""
        embeddings: List[List[float]] = self._embed_fn.embed_documents(list(texts))
        docs: List[TigrisDocument] = []
        for t, m, e, _id in itertools.zip_longest(
            texts, metadatas or [], embeddings or [], ids or []
        ):
            doc: TigrisDocument = {
                "text": t,
                "embeddings": e or [],
                "metadata": m or {},
            }
            if _id:
                doc["id"] = _id
            docs.append(doc)
        return docs
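To show how this class is meant to be used end to end, here is a hedged usage sketch, not part of the file above. It assumes a reachable Tigris deployment whose connection settings `TigrisClient()` can pick up from the environment, and it uses a hypothetical index name `"my_index"`. `FakeEmbeddings` is used only to keep the sketch self-contained; swap in a real `Embeddings` implementation for meaningful results.

```
from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores import Tigris

# Any Embeddings implementation works; FakeEmbeddings just keeps this runnable.
embeddings = FakeEmbeddings(size=128)

# `index_name` is required by `from_texts`; "my_index" is a hypothetical name.
# This embeds the texts and writes them to the Tigris search index.
store = Tigris.from_texts(
    texts=["hello tigris", "vector stores index embeddings"],
    embedding=embeddings,
    metadatas=[{"source": "greeting"}, {"source": "docs"}],
    index_name="my_index",
)

# Embeds the query and returns the k most similar documents.
docs = store.similarity_search("hello", k=1)
print(docs[0].page_content)
```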