diff --git a/libs/partners/postgres/LICENSE b/libs/partners/postgres/LICENSE deleted file mode 100644 index fc0602feec..0000000000 --- a/libs/partners/postgres/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2024 LangChain, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/libs/partners/postgres/Makefile b/libs/partners/postgres/Makefile deleted file mode 100644 index 1952795bd1..0000000000 --- a/libs/partners/postgres/Makefile +++ /dev/null @@ -1,56 +0,0 @@ -.PHONY: all format lint test tests integration_tests docker_tests help extended_tests - -# Default target executed when no arguments are given to make. -all: help - -# Define a variable for the test file path. -TEST_FILE ?= tests/unit_tests/ -integration_test integration_tests: TEST_FILE = tests/integration_tests/ - -test tests integration_test integration_tests: - poetry run pytest $(TEST_FILE) - -###################### -# LINTING AND FORMATTING -###################### - -# Define a variable for Python and notebook files. -PYTHON_FILES=. -MYPY_CACHE=.mypy_cache -lint format: PYTHON_FILES=. -lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/partners/postgres --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$') -lint_package: PYTHON_FILES=langchain_postgres -lint_tests: PYTHON_FILES=tests -lint_tests: MYPY_CACHE=.mypy_cache_test - -lint lint_diff lint_package lint_tests: - poetry run ruff . - poetry run ruff format $(PYTHON_FILES) --diff - poetry run ruff --select I $(PYTHON_FILES) - mkdir -p $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) - -format format_diff: - poetry run ruff format $(PYTHON_FILES) - poetry run ruff --select I --fix $(PYTHON_FILES) - -spell_check: - poetry run codespell --toml pyproject.toml - -spell_fix: - poetry run codespell --toml pyproject.toml -w - -check_imports: $(shell find langchain_postgres -name '*.py') - poetry run python ./scripts/check_imports.py $^ - -###################### -# HELP -###################### - -help: - @echo '----' - @echo 'check_imports - check imports' - @echo 'format - run code formatters' - @echo 'lint - run linters' - @echo 'test - run unit tests' - @echo 'tests - run unit tests' - @echo 'test TEST_FILE= - run all tests in file' diff --git a/libs/partners/postgres/README.md b/libs/partners/postgres/README.md index 55084835b4..94279efe1c 100644 --- a/libs/partners/postgres/README.md +++ b/libs/partners/postgres/README.md @@ -1,123 +1,4 @@ -# langchain-postgres +This package has moved! 
-The `langchain-postgres` package is an integration package managed by the core LangChain team.
+https://github.com/langchain-ai/langchain-postgres/
-This package contains implementations of core abstractions using `Postgres`.
-
-The package is released under the MIT license.
-
-Feel free to use the abstractions as provided, or modify / extend them as appropriate for your own application.
-
-## Installation
-
-```bash
-pip install -U langchain-postgres
-```
-
-## Usage
-
-### ChatMessageHistory
-
-The chat message history abstraction helps persist chat message history
-in a Postgres table.
-
-PostgresChatMessageHistory is parameterized using a `table_name` and a `session_id`.
-
-The `table_name` is the name of the table in the database where
-the chat messages will be stored.
-
-The `session_id` is a unique identifier for the chat session. It can be assigned
-by the caller using `uuid.uuid4()`.
-
-```python
-import uuid
-
-from langchain_core.messages import SystemMessage, AIMessage, HumanMessage
-from langchain_postgres import PostgresChatMessageHistory
-import psycopg
-
-# Establish a synchronous connection to the database
-# (or use psycopg.AsyncConnection for async)
-conn_info = ...  # Fill in with your connection info
-sync_connection = psycopg.connect(conn_info)
-
-# Create the table schema (only needs to be done once)
-table_name = "chat_history"
-PostgresChatMessageHistory.create_schema(sync_connection, table_name)
-
-session_id = str(uuid.uuid4())
-
-# Initialize the chat history manager
-chat_history = PostgresChatMessageHistory(
-    table_name,
-    session_id,
-    sync_connection=sync_connection
-)
-
-# Add messages to the chat history
-chat_history.add_messages([
-    SystemMessage(content="Meow"),
-    AIMessage(content="woof"),
-    HumanMessage(content="bark"),
-])
-
-print(chat_history.messages)
-```
-
-
-### PostgresCheckpoint
-
-An implementation of the LangGraph checkpoint saver abstraction using Postgres.
-
-
-Async Usage:
-
-```python
-from psycopg_pool import AsyncConnectionPool
-from langchain_postgres import (
-    PostgresCheckpoint, PickleCheckpointSerializer
-)
-
-pool = AsyncConnectionPool(
-    # Example configuration
-    conninfo="postgresql://user:password@localhost:5432/dbname",
-    max_size=20,
-)
-
-# Uses the pickle module for serialization
-# Make sure that you're only de-serializing trusted data
-# (e.g., payloads that you have serialized yourself).
-# Or implement a custom serializer.
-checkpoint = PostgresCheckpoint(
-    serializer=PickleCheckpointSerializer(),
-    async_connection=pool,
-)
-
-# Use the checkpoint object to put, get, list checkpoints, etc.
-```
-
-Sync Usage:
-
-```python
-from psycopg_pool import ConnectionPool
-from langchain_postgres import (
-    PostgresCheckpoint, PickleCheckpointSerializer
-)
-
-pool = ConnectionPool(
-    # Example configuration
-    conninfo="postgresql://user:password@localhost:5432/dbname",
-    max_size=20,
-)
-
-# Uses the pickle module for serialization
-# Make sure that you're only de-serializing trusted data
-# (e.g., payloads that you have serialized yourself).
-# Or implement a custom serializer.
-checkpoint = PostgresCheckpoint(
-    serializer=PickleCheckpointSerializer(),
-    sync_connection=pool,
-)
-
-# Use the checkpoint object to put, get, list checkpoints, etc.
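-
-# For example (an illustrative sketch, assuming the checkpoints table was
-# created once via PostgresCheckpoint.create_schema):
-config = {"configurable": {"thread_id": "example-thread"}}
-latest = checkpoint.get_tuple(config)  # newest CheckpointTuple, or None
-for saved in checkpoint.list(config):  # newest first
-    print(saved.config["configurable"]["thread_ts"])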
-```
diff --git a/libs/partners/postgres/langchain_postgres/__init__.py b/libs/partners/postgres/langchain_postgres/__init__.py
deleted file mode 100644
index ddda22e480..0000000000
--- a/libs/partners/postgres/langchain_postgres/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from importlib import metadata
-
-from langchain_postgres.chat_message_histories import PostgresChatMessageHistory
-from langchain_postgres.checkpoint import (
-    CheckpointSerializer,
-    PickleCheckpointSerializer,
-    PostgresCheckpoint,
-)
-
-try:
-    __version__ = metadata.version(__package__)
-except metadata.PackageNotFoundError:
-    # Case where package metadata is not available.
-    __version__ = ""
-
-__all__ = [
-    "__version__",
-    "CheckpointSerializer",
-    "PostgresChatMessageHistory",
-    "PostgresCheckpoint",
-    "PickleCheckpointSerializer",
-]
diff --git a/libs/partners/postgres/langchain_postgres/_utils.py b/libs/partners/postgres/langchain_postgres/_utils.py
deleted file mode 100644
index 9d8055af7a..0000000000
--- a/libs/partners/postgres/langchain_postgres/_utils.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""Copied over from langchain_community.
-
-This code should be moved to langchain proper or removed entirely.
-"""
-
-import logging
-from typing import List, Union
-
-import numpy as np
-
-logger = logging.getLogger(__name__)
-
-Matrix = Union[List[List[float]], List[np.ndarray], np.ndarray]
-
-
-def cosine_similarity(X: Matrix, Y: Matrix) -> np.ndarray:
-    """Row-wise cosine similarity between two equal-width matrices."""
-    if len(X) == 0 or len(Y) == 0:
-        return np.array([])
-
-    X = np.array(X)
-    Y = np.array(Y)
-    if X.shape[1] != Y.shape[1]:
-        raise ValueError(
-            f"Number of columns in X and Y must be the same. X has shape {X.shape} "
-            f"and Y has shape {Y.shape}."
-        )
-    try:
-        import simsimd as simd  # type: ignore
-
-        X = np.array(X, dtype=np.float32)
-        Y = np.array(Y, dtype=np.float32)
-        Z = 1 - simd.cdist(X, Y, metric="cosine")
-        if isinstance(Z, float):
-            return np.array([Z])
-        return np.array(Z)
-    except ImportError:
-        logger.debug(
-            "Unable to import simsimd, defaulting to NumPy implementation. If you want "
-            "to use simsimd please install with `pip install simsimd`."
-        )
-        X_norm = np.linalg.norm(X, axis=1)
-        Y_norm = np.linalg.norm(Y, axis=1)
-        # Ignore divide-by-zero and invalid-value runtime warnings; those cases are handled below.
- with np.errstate(divide="ignore", invalid="ignore"): - similarity = np.dot(X, Y.T) / np.outer(X_norm, Y_norm) - similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0 - return similarity - - -def maximal_marginal_relevance( - query_embedding: np.ndarray, - embedding_list: list, - lambda_mult: float = 0.5, - k: int = 4, -) -> List[int]: - """Calculate maximal marginal relevance.""" - if min(k, len(embedding_list)) <= 0: - return [] - if query_embedding.ndim == 1: - query_embedding = np.expand_dims(query_embedding, axis=0) - similarity_to_query = cosine_similarity(query_embedding, embedding_list)[0] - most_similar = int(np.argmax(similarity_to_query)) - idxs = [most_similar] - selected = np.array([embedding_list[most_similar]]) - while len(idxs) < min(k, len(embedding_list)): - best_score = -np.inf - idx_to_add = -1 - similarity_to_selected = cosine_similarity(embedding_list, selected) - for i, query_score in enumerate(similarity_to_query): - if i in idxs: - continue - redundant_score = max(similarity_to_selected[i]) - equation_score = ( - lambda_mult * query_score - (1 - lambda_mult) * redundant_score - ) - if equation_score > best_score: - best_score = equation_score - idx_to_add = i - idxs.append(idx_to_add) - selected = np.append(selected, [embedding_list[idx_to_add]], axis=0) - return idxs diff --git a/libs/partners/postgres/langchain_postgres/chat_message_histories.py b/libs/partners/postgres/langchain_postgres/chat_message_histories.py deleted file mode 100644 index 54674ca875..0000000000 --- a/libs/partners/postgres/langchain_postgres/chat_message_histories.py +++ /dev/null @@ -1,372 +0,0 @@ -"""Client for persisting chat message history in a Postgres database. - -This client provides support for both sync and async via psycopg 3. -""" -from __future__ import annotations - -import json -import logging -import re -import uuid -from typing import List, Optional, Sequence - -import psycopg -from langchain_core.chat_history import BaseChatMessageHistory -from langchain_core.messages import BaseMessage, message_to_dict, messages_from_dict -from psycopg import sql - -logger = logging.getLogger(__name__) - - -def _create_table_and_index(table_name: str) -> List[sql.Composed]: - """Make a SQL query to create a table.""" - index_name = f"idx_{table_name}_session_id" - statements = [ - sql.SQL( - """ - CREATE TABLE IF NOT EXISTS {table_name} ( - id SERIAL PRIMARY KEY, - session_id UUID NOT NULL, - message JSONB NOT NULL, - created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() - ); - """ - ).format(table_name=sql.Identifier(table_name)), - sql.SQL( - """ - CREATE INDEX IF NOT EXISTS {index_name} ON {table_name} (session_id); - """ - ).format( - table_name=sql.Identifier(table_name), index_name=sql.Identifier(index_name) - ), - ] - return statements - - -def _get_messages_query(table_name: str) -> sql.Composed: - """Make a SQL query to get messages for a given session.""" - return sql.SQL( - "SELECT message " - "FROM {table_name} " - "WHERE session_id = %(session_id)s " - "ORDER BY id;" - ).format(table_name=sql.Identifier(table_name)) - - -def _delete_by_session_id_query(table_name: str) -> sql.Composed: - """Make a SQL query to delete messages for a given session.""" - return sql.SQL( - "DELETE FROM {table_name} WHERE session_id = %(session_id)s;" - ).format(table_name=sql.Identifier(table_name)) - - -def _delete_table_query(table_name: str) -> sql.Composed: - """Make a SQL query to delete a table.""" - return sql.SQL("DROP TABLE IF EXISTS {table_name};").format( - 
-        table_name=sql.Identifier(table_name)
-    )
-
-
-def _insert_message_query(table_name: str) -> sql.Composed:
-    """Make a SQL query to insert a message."""
-    return sql.SQL(
-        "INSERT INTO {table_name} (session_id, message) VALUES (%s, %s)"
-    ).format(table_name=sql.Identifier(table_name))
-
-
-class PostgresChatMessageHistory(BaseChatMessageHistory):
-    def __init__(
-        self,
-        table_name: str,
-        session_id: str,
-        /,
-        *,
-        sync_connection: Optional[psycopg.Connection] = None,
-        async_connection: Optional[psycopg.AsyncConnection] = None,
-    ) -> None:
-        """Client for persisting chat message history in a Postgres database.
-
-        This client provides support for both sync and async via psycopg >=3.
-
-        The client can create the table schema in the database and provides
-        methods to add messages, get messages, and clear the chat message history.
-
-        The schema has the following columns:
-
-        - id: A serial primary key.
-        - session_id: The session ID for the chat message history.
-        - message: The JSONB message content.
-        - created_at: The timestamp of when the message was created.
-
-        Messages are retrieved for a given session_id and are sorted by
-        the id (which should be increasing monotonically), and correspond
-        to the order in which the messages were added to the history.
-
-        The "created_at" column is not returned by the interface, but
-        has been added to the schema so the information is available in the database.
-
-        A session_id can be used to separate different chat histories in the same
-        table; the session_id should be provided when initializing the client.
-
-        This chat history client takes in a psycopg connection object (either
-        Connection or AsyncConnection) and uses it to interact with the database.
-
-        This design allows reusing the underlying connection object across
-        multiple instantiations of this class, making instantiation fast.
-
-        This chat history client is designed for prototyping applications that
-        involve chat and are based on Postgres.
-
-        As your application grows, you will likely need to extend the schema to
-        handle more complex queries. For example, a chat application
-        may involve multiple tables like a user table, a table for storing
-        chat sessions / conversations, and this table for storing chat messages
-        for a given session. The application will require access to additional
-        endpoints like deleting messages by user id, listing conversations by
-        user id or ordering them based on last message time, etc.
-
-        Feel free to adapt this implementation to suit your application's needs.
-
-        Args:
-            table_name: The name of the database table to use
-            session_id: The session ID to use for the chat message history
-            sync_connection: An existing psycopg connection instance
-            async_connection: An existing psycopg async connection instance
-
-        Usage:
-            - Use the create_schema or acreate_schema method to set up the table
-              schema in the database.
-            - Initialize the class with the appropriate session ID, table name,
-              and database connection.
-            - Add messages to the database using add_messages or aadd_messages.
-            - Retrieve messages with get_messages or aget_messages.
-            - Clear the session history with clear or aclear when needed.
-
-        Note:
-            - At least one of sync_connection or async_connection must be provided.
-
-        Examples:
-
-        .. code-block:: python
-
-            import uuid
-
-            from langchain_core.messages import SystemMessage, AIMessage, HumanMessage
-            from langchain_postgres import PostgresChatMessageHistory
-            import psycopg
-
-            # Establish a synchronous connection to the database
-            # (or use psycopg.AsyncConnection for async)
-            conn_info = ...  # Fill in with your connection info
-            sync_connection = psycopg.connect(conn_info)
-
-            # Create the table schema (only needs to be done once)
-            table_name = "chat_history"
-            PostgresChatMessageHistory.create_schema(sync_connection, table_name)
-
-            session_id = str(uuid.uuid4())
-
-            # Initialize the chat history manager
-            chat_history = PostgresChatMessageHistory(
-                table_name,
-                session_id,
-                sync_connection=sync_connection
-            )
-
-            # Add messages to the chat history
-            chat_history.add_messages([
-                SystemMessage(content="Meow"),
-                AIMessage(content="woof"),
-                HumanMessage(content="bark"),
-            ])
-
-            print(chat_history.messages)
-        """
-        if not sync_connection and not async_connection:
-            raise ValueError("Must provide sync_connection or async_connection")
-
-        self._connection = sync_connection
-        self._aconnection = async_connection
-
-        # Validate that session id is a UUID
-        try:
-            uuid.UUID(session_id)
-        except ValueError:
-            raise ValueError(
-                f"Invalid session id. Session id must be a valid UUID. Got {session_id}"
-            )
-
-        self._session_id = session_id
-
-        if not re.match(r"^\w+$", table_name):
-            raise ValueError(
-                "Invalid table name. Table name must contain only alphanumeric "
-                "characters and underscores."
-            )
-        self._table_name = table_name
-
-    @staticmethod
-    def create_schema(
-        connection: psycopg.Connection,
-        table_name: str,
-        /,
-    ) -> None:
-        """Create the table schema in the database and create relevant indexes."""
-        queries = _create_table_and_index(table_name)
-        logger.info("Creating schema for table %s", table_name)
-        with connection.cursor() as cursor:
-            for query in queries:
-                cursor.execute(query)
-        connection.commit()
-
-    @staticmethod
-    async def acreate_schema(
-        connection: psycopg.AsyncConnection, table_name: str, /
-    ) -> None:
-        """Create the table schema in the database and create relevant indexes."""
-        queries = _create_table_and_index(table_name)
-        logger.info("Creating schema for table %s", table_name)
-        async with connection.cursor() as cur:
-            for query in queries:
-                await cur.execute(query)
-        await connection.commit()
-
-    @staticmethod
-    def drop_table(connection: psycopg.Connection, table_name: str, /) -> None:
-        """Delete the table schema in the database.
-
-        WARNING:
-            This will delete the given table from the database, including
-            all the data in the table and the schema of the table.
-
-        Args:
-            connection: The database connection.
-            table_name: The name of the table to drop.
-        """
-
-        query = _delete_table_query(table_name)
-        logger.info("Dropping table %s", table_name)
-        with connection.cursor() as cursor:
-            cursor.execute(query)
-        connection.commit()
-
-    @staticmethod
-    async def adrop_table(
-        connection: psycopg.AsyncConnection, table_name: str, /
-    ) -> None:
-        """Delete the table schema in the database.
-
-        WARNING:
-            This will delete the given table from the database, including
-            all the data in the table and the schema of the table.
-
-        Args:
-            connection: Async database connection.
-            table_name: The name of the table to drop.
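-
-        Example:
-
-            .. code-block:: python
-
-                # A minimal sketch (assumes `conn_info` holds your connection
-                # string, as in the class docstring example above):
-                async with await psycopg.AsyncConnection.connect(conn_info) as conn:
-                    await PostgresChatMessageHistory.adrop_table(conn, "chat_history")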
-        """
-        query = _delete_table_query(table_name)
-        logger.info("Dropping table %s", table_name)
-
-        async with connection.cursor() as acur:
-            await acur.execute(query)
-        await connection.commit()
-
-    def add_messages(self, messages: Sequence[BaseMessage]) -> None:
-        """Add messages to the chat message history."""
-        if self._connection is None:
-            raise ValueError(
-                "Please initialize the PostgresChatMessageHistory "
-                "with a sync connection or use the aadd_messages method instead."
-            )
-
-        values = [
-            (self._session_id, json.dumps(message_to_dict(message)))
-            for message in messages
-        ]
-
-        query = _insert_message_query(self._table_name)
-
-        with self._connection.cursor() as cursor:
-            cursor.executemany(query, values)
-        self._connection.commit()
-
-    async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None:
-        """Add messages to the chat message history."""
-        if self._aconnection is None:
-            raise ValueError(
-                "Please initialize the PostgresChatMessageHistory "
-                "with an async connection or use the sync add_messages method instead."
-            )
-
-        values = [
-            (self._session_id, json.dumps(message_to_dict(message)))
-            for message in messages
-        ]
-
-        query = _insert_message_query(self._table_name)
-        async with self._aconnection.cursor() as cursor:
-            await cursor.executemany(query, values)
-        await self._aconnection.commit()
-
-    def get_messages(self) -> List[BaseMessage]:
-        """Retrieve messages from the chat message history."""
-        if self._connection is None:
-            raise ValueError(
-                "Please initialize the PostgresChatMessageHistory "
-                "with a sync connection or use the async aget_messages method instead."
-            )
-
-        query = _get_messages_query(self._table_name)
-
-        with self._connection.cursor() as cursor:
-            cursor.execute(query, {"session_id": self._session_id})
-            items = [record[0] for record in cursor.fetchall()]
-
-        messages = messages_from_dict(items)
-        return messages
-
-    async def aget_messages(self) -> List[BaseMessage]:
-        """Retrieve messages from the chat message history."""
-        if self._aconnection is None:
-            raise ValueError(
-                "Please initialize the PostgresChatMessageHistory "
-                "with an async connection or use the sync get_messages method instead."
-            )
-
-        query = _get_messages_query(self._table_name)
-        async with self._aconnection.cursor() as cursor:
-            await cursor.execute(query, {"session_id": self._session_id})
-            items = [record[0] for record in await cursor.fetchall()]
-
-        messages = messages_from_dict(items)
-        return messages
-
-    @property  # type: ignore[override]
-    def messages(self) -> List[BaseMessage]:
-        """The base abstraction requires `messages` to be a property."""
-        return self.get_messages()
-
-    def clear(self) -> None:
-        """Clear the chat message history for the given session."""
-        if self._connection is None:
-            raise ValueError(
-                "Please initialize the PostgresChatMessageHistory "
-                "with a sync connection or use the async clear method instead."
-            )
-
-        query = _delete_by_session_id_query(self._table_name)
-        with self._connection.cursor() as cursor:
-            cursor.execute(query, {"session_id": self._session_id})
-        self._connection.commit()
-
-    async def aclear(self) -> None:
-        """Clear the chat message history for the given session."""
-        if self._aconnection is None:
-            raise ValueError(
-                "Please initialize the PostgresChatMessageHistory "
-                "with an async connection or use the sync clear method instead."
-            )
-
-        query = _delete_by_session_id_query(self._table_name)
-        async with self._aconnection.cursor() as cursor:
-            await cursor.execute(query, {"session_id": self._session_id})
-        await self._aconnection.commit()
diff --git a/libs/partners/postgres/langchain_postgres/checkpoint.py b/libs/partners/postgres/langchain_postgres/checkpoint.py
deleted file mode 100644
index 89a6972991..0000000000
--- a/libs/partners/postgres/langchain_postgres/checkpoint.py
+++ /dev/null
@@ -1,565 +0,0 @@
-"""Implementation of a langgraph checkpoint saver using Postgres."""
-import abc
-import pickle
-from contextlib import asynccontextmanager, contextmanager
-from typing import AsyncGenerator, AsyncIterator, Generator, Optional, Union, cast
-
-import psycopg
-from langchain_core.runnables import ConfigurableFieldSpec, RunnableConfig
-from langgraph.checkpoint import BaseCheckpointSaver
-from langgraph.checkpoint.base import Checkpoint, CheckpointThreadTs, CheckpointTuple
-from psycopg_pool import AsyncConnectionPool, ConnectionPool
-
-
-class CheckpointSerializer(abc.ABC):
-    """A serializer for serializing and deserializing objects to and from bytes."""
-
-    @abc.abstractmethod
-    def dumps(self, obj: Checkpoint) -> bytes:
-        """Serialize an object to bytes."""
-
-    @abc.abstractmethod
-    def loads(self, data: bytes) -> Checkpoint:
-        """Deserialize an object from bytes."""
-
-
-class PickleCheckpointSerializer(CheckpointSerializer):
-    """Use the pickle module to serialize and deserialize objects.
-
-    This serializer uses the pickle module to serialize and deserialize objects.
-
-    While pickling can serialize a wide range of Python objects, it may fail
-    to deserialize objects after updates to the Python version or the Python
-    environment (e.g., if the object's class definition changes in LangGraph).
-
-    *Security Warning*: The pickle module can deserialize malicious payloads;
-    only use this serializer with trusted data, e.g., data that you
-    have serialized yourself and can guarantee the integrity of.
-    """
-
-    def dumps(self, obj: Checkpoint) -> bytes:
-        """Serialize an object to bytes."""
-        return pickle.dumps(obj)
-
-    def loads(self, data: bytes) -> Checkpoint:
-        """Deserialize an object from bytes."""
-        return cast(Checkpoint, pickle.loads(data))
-
-
-class PostgresCheckpoint(BaseCheckpointSaver):
-    """LangGraph checkpoint saver for Postgres.
-
-    This implementation of a checkpoint saver uses a Postgres database to save
-    and retrieve checkpoints. It uses the psycopg (version 3) package to interact
-    with the Postgres database.
-
-    The checkpointer accepts either a sync_connection in the form of a
-    psycopg.Connection or a psycopg_pool.ConnectionPool object, or an
-    async_connection in the form of a psycopg.AsyncConnection or a
-    psycopg_pool.AsyncConnectionPool object.
-
-    Usage:
-
-    1. First-time use: create the schema in the database using the `create_schema`
-       method or the async version `acreate_schema` method.
-    2. Create a PostgresCheckpoint object with a serializer and an appropriate
-       connection object.
-       It's recommended to use a connection pool object for the connection.
-       If using a connection object, you are responsible for closing the connection
-       when done.
-
-    Examples:
-
-
-    Sync usage with a connection pool:
-
-        .. code-block:: python
-
-            from psycopg_pool import ConnectionPool
-            from langchain_postgres import (
-                PostgresCheckpoint, PickleCheckpointSerializer
-            )
-
-            pool = ConnectionPool(
-                # Example configuration
-                conninfo="postgresql://user:password@localhost:5432/dbname",
-                max_size=20,
-            )
-
-            # Uses the pickle module for serialization
-            # Make sure that you're only de-serializing trusted data
-            # (e.g., payloads that you have serialized yourself).
-            # Or implement a custom serializer.
-            checkpoint = PostgresCheckpoint(
-                serializer=PickleCheckpointSerializer(),
-                sync_connection=pool,
-            )
-
-            # Use the checkpoint object to put, get, list checkpoints, etc.
-
-
-    Async usage with a connection pool:
-
-        .. code-block:: python
-
-            from psycopg_pool import AsyncConnectionPool
-            from langchain_postgres import (
-                PostgresCheckpoint, PickleCheckpointSerializer
-            )
-
-            pool = AsyncConnectionPool(
-                # Example configuration
-                conninfo="postgresql://user:password@localhost:5432/dbname",
-                max_size=20,
-            )
-
-            # Uses the pickle module for serialization
-            # Make sure that you're only de-serializing trusted data
-            # (e.g., payloads that you have serialized yourself).
-            # Or implement a custom serializer.
-            checkpoint = PostgresCheckpoint(
-                serializer=PickleCheckpointSerializer(),
-                async_connection=pool,
-            )
-
-            # Use the checkpoint object to put, get, list checkpoints, etc.
-
-
-    Async usage with a connection object:
-
-        .. code-block:: python
-
-            from psycopg import AsyncConnection
-            from langchain_postgres import (
-                PostgresCheckpoint, PickleCheckpointSerializer
-            )
-
-            conninfo = "postgresql://user:password@localhost:5432/dbname"
-            # Take care of closing the connection when done
-            async with await AsyncConnection.connect(conninfo=conninfo) as conn:
-                # Uses the pickle module for serialization
-                # Make sure that you're only de-serializing trusted data
-                # (e.g., payloads that you have serialized yourself).
-                # Or implement a custom serializer.
-                checkpoint = PostgresCheckpoint(
-                    serializer=PickleCheckpointSerializer(),
-                    async_connection=conn,
-                )
-
-                # Use the checkpoint object to put, get, list checkpoints, etc.
-                ...
-    """
-
-    serializer: CheckpointSerializer
-    """The serializer for serializing and deserializing objects to and from bytes."""
-
-    sync_connection: Optional[Union[psycopg.Connection, ConnectionPool]] = None
-    """The synchronous connection or pool to the Postgres database.
-
-    If providing a connection object, please ensure that the connection is open
-    and remember to close the connection when done.
-    """
-    async_connection: Optional[
-        Union[psycopg.AsyncConnection, AsyncConnectionPool]
-    ] = None
-    """The asynchronous connection or pool to the Postgres database.
-
-    If providing a connection object, please ensure that the connection is open
-    and remember to close the connection when done.
-    """
-
-    class Config:
-        arbitrary_types_allowed = True
-        extra = "forbid"
-
-    @property
-    def config_specs(self) -> list[ConfigurableFieldSpec]:
-        """Return the configuration specs for this runnable."""
-        return [
-            ConfigurableFieldSpec(
-                id="thread_id",
-                annotation=Optional[str],
-                name="Thread ID",
-                description=None,
-                default=None,
-                is_shared=True,
-            ),
-            CheckpointThreadTs,
-        ]
-
-    @contextmanager
-    def _get_sync_connection(self) -> Generator[psycopg.Connection, None, None]:
-        """Get the connection to the Postgres database."""
-        if isinstance(self.sync_connection, psycopg.Connection):
-            yield self.sync_connection
-        elif isinstance(self.sync_connection, ConnectionPool):
-            with self.sync_connection.connection() as conn:
-                yield conn
-        else:
-            raise ValueError(
-                "Invalid sync connection object. Please initialize the checkpointer "
-                f"with an appropriate sync connection object. "
-                f"Got {type(self.sync_connection)}."
-            )
-
-    @asynccontextmanager
-    async def _get_async_connection(
-        self,
-    ) -> AsyncGenerator[psycopg.AsyncConnection, None]:
-        """Get the connection to the Postgres database."""
-        if isinstance(self.async_connection, psycopg.AsyncConnection):
-            yield self.async_connection
-        elif isinstance(self.async_connection, AsyncConnectionPool):
-            async with self.async_connection.connection() as conn:
-                yield conn
-        else:
-            raise ValueError(
-                "Invalid async connection object. Please initialize the checkpointer "
-                f"with an appropriate async connection object. "
-                f"Got {type(self.async_connection)}."
-            )
-
-    @staticmethod
-    def create_schema(connection: psycopg.Connection, /) -> None:
-        """Create the schema for the checkpoint saver."""
-        with connection.cursor() as cur:
-            cur.execute(
-                """
-                CREATE TABLE IF NOT EXISTS checkpoints (
-                    thread_id TEXT NOT NULL,
-                    checkpoint BYTEA NOT NULL,
-                    thread_ts TIMESTAMPTZ NOT NULL,
-                    parent_ts TIMESTAMPTZ,
-                    PRIMARY KEY (thread_id, thread_ts)
-                );
-                """
-            )
-
-    @staticmethod
-    async def acreate_schema(connection: psycopg.AsyncConnection, /) -> None:
-        """Create the schema for the checkpoint saver."""
-        async with connection.cursor() as cur:
-            await cur.execute(
-                """
-                CREATE TABLE IF NOT EXISTS checkpoints (
-                    thread_id TEXT NOT NULL,
-                    checkpoint BYTEA NOT NULL,
-                    thread_ts TIMESTAMPTZ NOT NULL,
-                    parent_ts TIMESTAMPTZ,
-                    PRIMARY KEY (thread_id, thread_ts)
-                );
-                """
-            )
-
-    @staticmethod
-    def drop_schema(connection: psycopg.Connection, /) -> None:
-        """Drop the table for the checkpoint saver."""
-        with connection.cursor() as cur:
-            cur.execute("DROP TABLE IF EXISTS checkpoints;")
-
-    @staticmethod
-    async def adrop_schema(connection: psycopg.AsyncConnection, /) -> None:
-        """Drop the table for the checkpoint saver."""
-        async with connection.cursor() as cur:
-            await cur.execute("DROP TABLE IF EXISTS checkpoints;")
-
-    def put(self, config: RunnableConfig, checkpoint: Checkpoint) -> RunnableConfig:
-        """Put the checkpoint for the given configuration.
-
-        Args:
-            config: The configuration for the checkpoint.
-                A dict with a `configurable` key which is a dict with
-                a `thread_id` key and an optional `thread_ts` key.
-                For example, { 'configurable': { 'thread_id': 'test_thread' } }
-            checkpoint: The checkpoint to persist.
-
-        Returns:
-            The RunnableConfig that describes the checkpoint that was just created.
-            It'll contain the `thread_id` and `thread_ts` of the checkpoint.
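-
-        Example:
-
-            .. code-block:: python
-
-                # Illustrative sketch only: `my_checkpoint` stands in for a
-                # Checkpoint produced by LangGraph and is not defined here.
-                config = {"configurable": {"thread_id": "test_thread"}}
-                next_config = checkpoint.put(config, my_checkpoint)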
- """ - thread_id = config["configurable"]["thread_id"] - parent_ts = config["configurable"].get("thread_ts") - - with self._get_sync_connection() as conn: - with conn.cursor() as cur: - cur.execute( - """ - INSERT INTO checkpoints - (thread_id, thread_ts, parent_ts, checkpoint) - VALUES - (%(thread_id)s, %(thread_ts)s, %(parent_ts)s, %(checkpoint)s) - ON CONFLICT (thread_id, thread_ts) - DO UPDATE SET checkpoint = EXCLUDED.checkpoint; - """, - { - "thread_id": thread_id, - "thread_ts": checkpoint["ts"], - "parent_ts": parent_ts if parent_ts else None, - "checkpoint": self.serializer.dumps(checkpoint), - }, - ) - - return { - "configurable": { - "thread_id": thread_id, - "thread_ts": checkpoint["ts"], - }, - } - - async def aput( - self, config: RunnableConfig, checkpoint: Checkpoint - ) -> RunnableConfig: - """Put the checkpoint for the given configuration. - - Args: - config: The configuration for the checkpoint. - A dict with a `configurable` key which is a dict with - a `thread_id` key and an optional `thread_ts` key. - For example, { 'configurable': { 'thread_id': 'test_thread' } } - checkpoint: The checkpoint to persist. - - Returns: - The RunnableConfig that describes the checkpoint that was just created. - It'll contain the `thread_id` and `thread_ts` of the checkpoint. - """ - thread_id = config["configurable"]["thread_id"] - parent_ts = config["configurable"].get("thread_ts") - async with self._get_async_connection() as conn: - async with conn.cursor() as cur: - await cur.execute( - """ - INSERT INTO - checkpoints (thread_id, thread_ts, parent_ts, checkpoint) - VALUES - (%(thread_id)s, %(thread_ts)s, %(parent_ts)s, %(checkpoint)s) - ON CONFLICT (thread_id, thread_ts) - DO UPDATE SET checkpoint = EXCLUDED.checkpoint; - """, - { - "thread_id": thread_id, - "thread_ts": checkpoint["ts"], - "parent_ts": parent_ts if parent_ts else None, - "checkpoint": self.serializer.dumps(checkpoint), - }, - ) - - return { - "configurable": { - "thread_id": thread_id, - "thread_ts": checkpoint["ts"], - }, - } - - def list(self, config: RunnableConfig) -> Generator[CheckpointTuple, None, None]: - """Get all the checkpoints for the given configuration.""" - with self._get_sync_connection() as conn: - with conn.cursor() as cur: - thread_id = config["configurable"]["thread_id"] - cur.execute( - "SELECT checkpoint, thread_ts, parent_ts " - "FROM checkpoints " - "WHERE thread_id = %(thread_id)s " - "ORDER BY thread_ts DESC", - { - "thread_id": thread_id, - }, - ) - for value in cur: - yield CheckpointTuple( - { - "configurable": { - "thread_id": thread_id, - "thread_ts": value[1].isoformat(), - } - }, - self.serializer.loads(value[0]), - { - "configurable": { - "thread_id": thread_id, - "thread_ts": value[2].isoformat(), - } - } - if value[2] - else None, - ) - - async def alist(self, config: RunnableConfig) -> AsyncIterator[CheckpointTuple]: - """Get all the checkpoints for the given configuration.""" - async with self._get_async_connection() as conn: - async with conn.cursor() as cur: - thread_id = config["configurable"]["thread_id"] - await cur.execute( - "SELECT checkpoint, thread_ts, parent_ts " - "FROM checkpoints " - "WHERE thread_id = %(thread_id)s " - "ORDER BY thread_ts DESC", - { - "thread_id": thread_id, - }, - ) - async for value in cur: - yield CheckpointTuple( - { - "configurable": { - "thread_id": thread_id, - "thread_ts": value[1].isoformat(), - } - }, - self.serializer.loads(value[0]), - { - "configurable": { - "thread_id": thread_id, - "thread_ts": value[2].isoformat(), - } - } - if 
value[2] - else None, - ) - - def get_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]: - """Get the checkpoint tuple for the given configuration. - - Args: - config: The configuration for the checkpoint. - A dict with a `configurable` key which is a dict with - a `thread_id` key and an optional `thread_ts` key. - For example, { 'configurable': { 'thread_id': 'test_thread' } } - - Returns: - The checkpoint tuple for the given configuration if it exists, - otherwise None. - - If thread_ts is None, the latest checkpoint is returned if it exists. - """ - thread_id = config["configurable"]["thread_id"] - thread_ts = config["configurable"].get("thread_ts") - with self._get_sync_connection() as conn: - with conn.cursor() as cur: - if thread_ts: - cur.execute( - "SELECT checkpoint, parent_ts " - "FROM checkpoints " - "WHERE thread_id = %(thread_id)s AND thread_ts = %(thread_ts)s", - { - "thread_id": thread_id, - "thread_ts": thread_ts, - }, - ) - value = cur.fetchone() - if value: - return CheckpointTuple( - config, - self.serializer.loads(value[0]), - { - "configurable": { - "thread_id": thread_id, - "thread_ts": value[1].isoformat(), - } - } - if value[1] - else None, - ) - else: - cur.execute( - "SELECT checkpoint, thread_ts, parent_ts " - "FROM checkpoints " - "WHERE thread_id = %(thread_id)s " - "ORDER BY thread_ts DESC LIMIT 1", - { - "thread_id": thread_id, - }, - ) - value = cur.fetchone() - if value: - return CheckpointTuple( - config={ - "configurable": { - "thread_id": thread_id, - "thread_ts": value[1].isoformat(), - } - }, - checkpoint=self.serializer.loads(value[0]), - parent_config={ - "configurable": { - "thread_id": thread_id, - "thread_ts": value[2].isoformat(), - } - } - if value[2] - else None, - ) - return None - - async def aget_tuple(self, config: RunnableConfig) -> Optional[CheckpointTuple]: - """Get the checkpoint tuple for the given configuration. - - Args: - config: The configuration for the checkpoint. - A dict with a `configurable` key which is a dict with - a `thread_id` key and an optional `thread_ts` key. - For example, { 'configurable': { 'thread_id': 'test_thread' } } - - Returns: - The checkpoint tuple for the given configuration if it exists, - otherwise None. - - If thread_ts is None, the latest checkpoint is returned if it exists. 
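-
-        Example:
-
-            .. code-block:: python
-
-                # Illustrative sketch: fetch the latest checkpoint for a thread.
-                tup = checkpoint.get_tuple(
-                    {"configurable": {"thread_id": "test_thread"}}
-                )
-                if tup is not None:
-                    print(tup.config["configurable"]["thread_ts"])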
- """ - thread_id = config["configurable"]["thread_id"] - thread_ts = config["configurable"].get("thread_ts") - async with self._get_async_connection() as conn: - async with conn.cursor() as cur: - if thread_ts: - await cur.execute( - "SELECT checkpoint, parent_ts " - "FROM checkpoints " - "WHERE thread_id = %(thread_id)s AND thread_ts = %(thread_ts)s", - { - "thread_id": thread_id, - "thread_ts": thread_ts, - }, - ) - value = await cur.fetchone() - if value: - return CheckpointTuple( - config, - self.serializer.loads(value[0]), - { - "configurable": { - "thread_id": thread_id, - "thread_ts": value[1].isoformat(), - } - } - if value[1] - else None, - ) - else: - await cur.execute( - "SELECT checkpoint, thread_ts, parent_ts " - "FROM checkpoints " - "WHERE thread_id = %(thread_id)s " - "ORDER BY thread_ts DESC LIMIT 1", - { - "thread_id": thread_id, - }, - ) - value = await cur.fetchone() - if value: - return CheckpointTuple( - config={ - "configurable": { - "thread_id": thread_id, - "thread_ts": value[1].isoformat(), - } - }, - checkpoint=self.serializer.loads(value[0]), - parent_config={ - "configurable": { - "thread_id": thread_id, - "thread_ts": value[2].isoformat(), - } - } - if value[2] - else None, - ) - - return None diff --git a/libs/partners/postgres/langchain_postgres/py.typed b/libs/partners/postgres/langchain_postgres/py.typed deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/libs/partners/postgres/langchain_postgres/vectorstores.py b/libs/partners/postgres/langchain_postgres/vectorstores.py deleted file mode 100644 index 6750fe7a25..0000000000 --- a/libs/partners/postgres/langchain_postgres/vectorstores.py +++ /dev/null @@ -1,1349 +0,0 @@ -from __future__ import annotations - -import contextlib -import enum -import logging -import uuid -from typing import ( - Any, - Callable, - Dict, - Generator, - Iterable, - List, - Optional, - Tuple, - Type, -) - -import numpy as np -import sqlalchemy -from langchain_core._api import warn_deprecated -from sqlalchemy import SQLColumnExpression, cast, delete, func -from sqlalchemy.dialects.postgresql import JSON, JSONB, JSONPATH, UUID -from sqlalchemy.orm import Session, relationship - -try: - from sqlalchemy.orm import declarative_base -except ImportError: - from sqlalchemy.ext.declarative import declarative_base - -from langchain_core.documents import Document -from langchain_core.embeddings import Embeddings -from langchain_core.runnables.config import run_in_executor -from langchain_core.utils import get_from_dict_or_env -from langchain_core.vectorstores import VectorStore - -from langchain_postgres._utils import maximal_marginal_relevance - - -class DistanceStrategy(str, enum.Enum): - """Enumerator of the Distance strategies.""" - - EUCLIDEAN = "l2" - COSINE = "cosine" - MAX_INNER_PRODUCT = "inner" - - -DEFAULT_DISTANCE_STRATEGY = DistanceStrategy.COSINE - -Base = declarative_base() # type: Any - - -_LANGCHAIN_DEFAULT_COLLECTION_NAME = "langchain" - - -class BaseModel(Base): - """Base model for the SQL stores.""" - - __abstract__ = True - uuid = sqlalchemy.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) - - -_classes: Any = None - -COMPARISONS_TO_NATIVE = { - "$eq": "==", - "$ne": "!=", - "$lt": "<", - "$lte": "<=", - "$gt": ">", - "$gte": ">=", -} - -SPECIAL_CASED_OPERATORS = { - "$in", - "$nin", - "$between", -} - -TEXT_OPERATORS = { - "$like", - "$ilike", -} - -LOGICAL_OPERATORS = {"$and", "$or"} - -SUPPORTED_OPERATORS = ( - set(COMPARISONS_TO_NATIVE) - .union(TEXT_OPERATORS) - 
-    .union(LOGICAL_OPERATORS)
-    .union(SPECIAL_CASED_OPERATORS)
-)
-
-
-def _get_embedding_collection_store(
-    vector_dimension: Optional[int] = None, *, use_jsonb: bool = True
-) -> Any:
-    global _classes
-    if _classes is not None:
-        return _classes
-
-    from pgvector.sqlalchemy import Vector  # type: ignore
-
-    class CollectionStore(BaseModel):
-        """Collection store."""
-
-        __tablename__ = "langchain_pg_collection"
-
-        name = sqlalchemy.Column(sqlalchemy.String)
-        cmetadata = sqlalchemy.Column(JSON)
-
-        embeddings = relationship(
-            "EmbeddingStore",
-            back_populates="collection",
-            passive_deletes=True,
-        )
-
-        @classmethod
-        def get_by_name(
-            cls, session: Session, name: str
-        ) -> Optional["CollectionStore"]:
-            return session.query(cls).filter(cls.name == name).first()  # type: ignore
-
-        @classmethod
-        def get_or_create(
-            cls,
-            session: Session,
-            name: str,
-            cmetadata: Optional[dict] = None,
-        ) -> Tuple["CollectionStore", bool]:
-            """
-            Get or create a collection.
-            Returns a tuple (CollectionStore, bool), where the bool is True if the collection was created.
-            """  # noqa: E501
-            created = False
-            collection = cls.get_by_name(session, name)
-            if collection:
-                return collection, created
-
-            collection = cls(name=name, cmetadata=cmetadata)
-            session.add(collection)
-            session.commit()
-            created = True
-            return collection, created
-
-    if use_jsonb:
-        # TODO(PRIOR TO LANDING): Create a gin index on the cmetadata field
-        class EmbeddingStore(BaseModel):
-            """Embedding store."""
-
-            __tablename__ = "langchain_pg_embedding"
-
-            collection_id = sqlalchemy.Column(
-                UUID(as_uuid=True),
-                sqlalchemy.ForeignKey(
-                    f"{CollectionStore.__tablename__}.uuid",
-                    ondelete="CASCADE",
-                ),
-            )
-            collection = relationship(CollectionStore, back_populates="embeddings")
-
-            embedding: Vector = sqlalchemy.Column(Vector(vector_dimension))
-            document = sqlalchemy.Column(sqlalchemy.String, nullable=True)
-            cmetadata = sqlalchemy.Column(JSONB, nullable=True)
-
-            # custom_id: any user-defined id
-            custom_id = sqlalchemy.Column(sqlalchemy.String, nullable=True)
-
-            __table_args__ = (
-                sqlalchemy.Index(
-                    "ix_cmetadata_gin",
-                    "cmetadata",
-                    postgresql_using="gin",
-                    postgresql_ops={"cmetadata": "jsonb_path_ops"},
-                ),
-            )
-    else:
-        # For backwards compatibility with older versions of pgvector
-        # This should be removed in the future (remove during migration)
-        class EmbeddingStore(BaseModel):  # type: ignore[no-redef]
-            """Embedding store."""
-
-            __tablename__ = "langchain_pg_embedding"
-
-            collection_id = sqlalchemy.Column(
-                UUID(as_uuid=True),
-                sqlalchemy.ForeignKey(
-                    f"{CollectionStore.__tablename__}.uuid",
-                    ondelete="CASCADE",
-                ),
-            )
-            collection = relationship(CollectionStore, back_populates="embeddings")
-
-            embedding: Vector = sqlalchemy.Column(Vector(vector_dimension))
-            document = sqlalchemy.Column(sqlalchemy.String, nullable=True)
-            cmetadata = sqlalchemy.Column(JSON, nullable=True)
-
-            # custom_id: any user-defined id
-            custom_id = sqlalchemy.Column(sqlalchemy.String, nullable=True)
-
-    _classes = (EmbeddingStore, CollectionStore)
-
-    return _classes
-
-
-def _results_to_docs(docs_and_scores: Any) -> List[Document]:
-    """Return docs from docs and scores."""
-    return [doc for doc, _ in docs_and_scores]
-
-
-class PGVector(VectorStore):
-    """`Postgres`/`PGVector` vector store.
-
-    To use, you should have the ``pgvector`` python package installed.
-
-    Example:
-        .. code-block:: python
-
-            from langchain_postgres.vectorstores import PGVector
-            from langchain_community.embeddings.openai import OpenAIEmbeddings
-
-            CONNECTION_STRING = "postgresql+psycopg2://hwc@localhost:5432/test3"
-            COLLECTION_NAME = "state_of_the_union_test"
-            embeddings = OpenAIEmbeddings()
-            vectorstore = PGVector.from_documents(
-                embedding=embeddings,
-                documents=docs,
-                collection_name=COLLECTION_NAME,
-                connection_string=CONNECTION_STRING,
-                use_jsonb=True,
-            )
-    """
-
-    def __init__(
-        self,
-        connection_string: str,
-        embedding_function: Embeddings,
-        embedding_length: Optional[int] = None,
-        collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME,
-        collection_metadata: Optional[dict] = None,
-        distance_strategy: DistanceStrategy = DEFAULT_DISTANCE_STRATEGY,
-        pre_delete_collection: bool = False,
-        logger: Optional[logging.Logger] = None,
-        relevance_score_fn: Optional[Callable[[float], float]] = None,
-        *,
-        connection: Optional[sqlalchemy.engine.Connection] = None,
-        engine_args: Optional[dict[str, Any]] = None,
-        use_jsonb: bool = False,
-        create_extension: bool = True,
-    ) -> None:
-        """Initialize the PGVector store.
-
-        Args:
-            connection_string: Postgres connection string.
-            embedding_function: Any embedding function implementing
-                `langchain.embeddings.base.Embeddings` interface.
-            embedding_length: The length of the embedding vector. (default: None)
-                NOTE: This is not mandatory. Defining it will prevent vectors of
-                any other size from being added to the embeddings table but, without
-                it, the embeddings can't be indexed.
-            collection_name: The name of the collection to use. (default: langchain)
-                NOTE: This is not the name of the table, but the name of the collection.
-                The tables will be created when initializing the store (if they do
-                not exist), so make sure the user has the right permissions to create
-                tables.
-            distance_strategy: The distance strategy to use. (default: COSINE)
-            pre_delete_collection: If True, will delete the collection if it exists.
-                (default: False). Useful for testing.
-            engine_args: SQLAlchemy's create engine arguments.
-            use_jsonb: Use JSONB instead of JSON for metadata. (default: False)
-                Using JSON is strongly discouraged, as it's not as efficient
-                for querying.
-                It's provided here for backwards compatibility with older versions,
-                and will be removed in the future.
-            create_extension: If True, will create the vector extension if it
-                doesn't exist. Disabling creation is useful when using read-only
-                databases.
-        """
-        self.connection_string = connection_string
-        self.embedding_function = embedding_function
-        self._embedding_length = embedding_length
-        self.collection_name = collection_name
-        self.collection_metadata = collection_metadata
-        self._distance_strategy = distance_strategy
-        self.pre_delete_collection = pre_delete_collection
-        self.logger = logger or logging.getLogger(__name__)
-        self.override_relevance_score_fn = relevance_score_fn
-        self.engine_args = engine_args or {}
-        self._bind = connection if connection else self._create_engine()
-        self.use_jsonb = use_jsonb
-        self.create_extension = create_extension
-
-        if not use_jsonb:
-            # Replace with a deprecation warning.
-            warn_deprecated(
-                "0.0.29",
-                pending=True,
-                message=(
-                    "Please use JSONB instead of JSON for metadata. "
-                    "This change will allow for more efficient querying that "
-                    "involves filtering based on metadata. "
-                    "Please note that filtering operators have been changed "
-                    "when using JSONB metadata to be prefixed with a $ sign "
-                    "to avoid name collisions with columns. "
-                    "If you're using an existing database, you will need to create a "
-                    "db migration for your metadata column to be JSONB and update your "
-                    "queries to use the new operators. "
-                ),
-                alternative=(
-                    "Instantiate with use_jsonb=True to use JSONB instead "
-                    "of JSON for metadata."
-                ),
-            )
-        self.__post_init__()
-
-    def __post_init__(
-        self,
-    ) -> None:
-        """Initialize the store."""
-        if self.create_extension:
-            self.create_vector_extension()
-
-        EmbeddingStore, CollectionStore = _get_embedding_collection_store(
-            self._embedding_length, use_jsonb=self.use_jsonb
-        )
-        self.CollectionStore = CollectionStore
-        self.EmbeddingStore = EmbeddingStore
-        self.create_tables_if_not_exists()
-        self.create_collection()
-
-    def __del__(self) -> None:
-        if isinstance(self._bind, sqlalchemy.engine.Connection):
-            self._bind.close()
-
-    @property
-    def embeddings(self) -> Embeddings:
-        return self.embedding_function
-
-    def _create_engine(self) -> sqlalchemy.engine.Engine:
-        return sqlalchemy.create_engine(url=self.connection_string, **self.engine_args)
-
-    def create_vector_extension(self) -> None:
-        try:
-            with Session(self._bind) as session:  # type: ignore[arg-type]
-                # The advisory lock fixes issues arising from concurrent
-                # creation of the vector extension.
-                # https://github.com/langchain-ai/langchain/issues/12933
-                # For more information see:
-                # https://www.postgresql.org/docs/16/explicit-locking.html#ADVISORY-LOCKS
-                statement = sqlalchemy.text(
-                    "BEGIN;"
-                    "SELECT pg_advisory_xact_lock(1573678846307946496);"
-                    "CREATE EXTENSION IF NOT EXISTS vector;"
-                    "COMMIT;"
-                )
-                session.execute(statement)
-                session.commit()
-        except Exception as e:
-            raise Exception(f"Failed to create vector extension: {e}") from e
-
-    def create_tables_if_not_exists(self) -> None:
-        with Session(self._bind) as session, session.begin():  # type: ignore[arg-type]
-            Base.metadata.create_all(session.get_bind())
-
-    def drop_tables(self) -> None:
-        with Session(self._bind) as session, session.begin():  # type: ignore[arg-type]
-            Base.metadata.drop_all(session.get_bind())
-
-    def create_collection(self) -> None:
-        if self.pre_delete_collection:
-            self.delete_collection()
-        with Session(self._bind) as session:  # type: ignore[arg-type]
-            self.CollectionStore.get_or_create(
-                session, self.collection_name, cmetadata=self.collection_metadata
-            )
-
-    def delete_collection(self) -> None:
-        self.logger.debug("Trying to delete collection")
-        with Session(self._bind) as session:  # type: ignore[arg-type]
-            collection = self.get_collection(session)
-            if not collection:
-                self.logger.warning("Collection not found")
-                return
-            session.delete(collection)
-            session.commit()
-
-    @contextlib.contextmanager
-    def _make_session(self) -> Generator[Session, None, None]:
-        """Create a context manager for a session bound to the store's connection."""
-        yield Session(self._bind)  # type: ignore[arg-type]
-
-    def delete(
-        self,
-        ids: Optional[List[str]] = None,
-        collection_only: bool = False,
-        **kwargs: Any,
-    ) -> None:
-        """Delete vectors by ids or uuids.
-
-        Args:
-            ids: List of ids to delete.
-            collection_only: Only delete ids in the collection.
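-
-        Example:
-
-            .. code-block:: python
-
-                # Illustrative sketch: delete two records by their custom ids,
-                # restricted to this store's collection.
-                store.delete(ids=["id-1", "id-2"], collection_only=True)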
- """ - with Session(self._bind) as session: # type: ignore[arg-type] - if ids is not None: - self.logger.debug( - "Trying to delete vectors by ids (represented by the model " - "using the custom ids field)" - ) - - stmt = delete(self.EmbeddingStore) - - if collection_only: - collection = self.get_collection(session) - if not collection: - self.logger.warning("Collection not found") - return - - stmt = stmt.where( - self.EmbeddingStore.collection_id == collection.uuid - ) - - stmt = stmt.where(self.EmbeddingStore.custom_id.in_(ids)) - session.execute(stmt) - session.commit() - - def get_collection(self, session: Session) -> Any: - return self.CollectionStore.get_by_name(session, self.collection_name) - - @classmethod - def __from( - cls, - texts: List[str], - embeddings: List[List[float]], - embedding: Embeddings, - metadatas: Optional[List[dict]] = None, - ids: Optional[List[str]] = None, - collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME, - distance_strategy: DistanceStrategy = DEFAULT_DISTANCE_STRATEGY, - connection_string: Optional[str] = None, - pre_delete_collection: bool = False, - *, - use_jsonb: bool = False, - **kwargs: Any, - ) -> PGVector: - if ids is None: - ids = [str(uuid.uuid1()) for _ in texts] - - if not metadatas: - metadatas = [{} for _ in texts] - if connection_string is None: - connection_string = cls.get_connection_string(kwargs) - - store = cls( - connection_string=connection_string, - collection_name=collection_name, - embedding_function=embedding, - distance_strategy=distance_strategy, - pre_delete_collection=pre_delete_collection, - use_jsonb=use_jsonb, - **kwargs, - ) - - store.add_embeddings( - texts=texts, embeddings=embeddings, metadatas=metadatas, ids=ids, **kwargs - ) - - return store - - def add_embeddings( - self, - texts: Iterable[str], - embeddings: List[List[float]], - metadatas: Optional[List[dict]] = None, - ids: Optional[List[str]] = None, - **kwargs: Any, - ) -> List[str]: - """Add embeddings to the vectorstore. - - Args: - texts: Iterable of strings to add to the vectorstore. - embeddings: List of list of embedding vectors. - metadatas: List of metadatas associated with the texts. - kwargs: vectorstore specific parameters - """ - if ids is None: - ids = [str(uuid.uuid1()) for _ in texts] - - if not metadatas: - metadatas = [{} for _ in texts] - - with Session(self._bind) as session: # type: ignore[arg-type] - collection = self.get_collection(session) - if not collection: - raise ValueError("Collection not found") - documents = [] - for text, metadata, embedding, id in zip(texts, metadatas, embeddings, ids): - embedding_store = self.EmbeddingStore( - embedding=embedding, - document=text, - cmetadata=metadata, - custom_id=id, - collection_id=collection.uuid, - ) - documents.append(embedding_store) - session.bulk_save_objects(documents) - session.commit() - - return ids - - def add_texts( - self, - texts: Iterable[str], - metadatas: Optional[List[dict]] = None, - ids: Optional[List[str]] = None, - **kwargs: Any, - ) -> List[str]: - """Run more texts through the embeddings and add to the vectorstore. - - Args: - texts: Iterable of strings to add to the vectorstore. - metadatas: Optional list of metadatas associated with the texts. - kwargs: vectorstore specific parameters - - Returns: - List of ids from adding the texts into the vectorstore. 
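-
-        Example:
-
-            .. code-block:: python
-
-                # Illustrative sketch: embed and store two texts with metadata.
-                ids = store.add_texts(
-                    ["hello postgres", "hello pgvector"],
-                    metadatas=[{"topic": "db"}, {"topic": "extension"}],
-                )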
- """ - embeddings = self.embedding_function.embed_documents(list(texts)) - return self.add_embeddings( - texts=texts, embeddings=embeddings, metadatas=metadatas, ids=ids, **kwargs - ) - - def similarity_search( - self, - query: str, - k: int = 4, - filter: Optional[dict] = None, - **kwargs: Any, - ) -> List[Document]: - """Run similarity search with PGVector with distance. - - Args: - query (str): Query text to search for. - k (int): Number of results to return. Defaults to 4. - filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None. - - Returns: - List of Documents most similar to the query. - """ - embedding = self.embedding_function.embed_query(text=query) - return self.similarity_search_by_vector( - embedding=embedding, - k=k, - filter=filter, - ) - - def similarity_search_with_score( - self, - query: str, - k: int = 4, - filter: Optional[dict] = None, - ) -> List[Tuple[Document, float]]: - """Return docs most similar to query. - - Args: - query: Text to look up documents similar to. - k: Number of Documents to return. Defaults to 4. - filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None. - - Returns: - List of Documents most similar to the query and score for each. - """ - embedding = self.embedding_function.embed_query(query) - docs = self.similarity_search_with_score_by_vector( - embedding=embedding, k=k, filter=filter - ) - return docs - - @property - def distance_strategy(self) -> Any: - if self._distance_strategy == DistanceStrategy.EUCLIDEAN: - return self.EmbeddingStore.embedding.l2_distance - elif self._distance_strategy == DistanceStrategy.COSINE: - return self.EmbeddingStore.embedding.cosine_distance - elif self._distance_strategy == DistanceStrategy.MAX_INNER_PRODUCT: - return self.EmbeddingStore.embedding.max_inner_product - else: - raise ValueError( - f"Got unexpected value for distance: {self._distance_strategy}. " - f"Should be one of {', '.join([ds.value for ds in DistanceStrategy])}." - ) - - def similarity_search_with_score_by_vector( - self, - embedding: List[float], - k: int = 4, - filter: Optional[dict] = None, - ) -> List[Tuple[Document, float]]: - results = self.__query_collection(embedding=embedding, k=k, filter=filter) - - return self._results_to_docs_and_scores(results) - - def _results_to_docs_and_scores(self, results: Any) -> List[Tuple[Document, float]]: - """Return docs and scores from results.""" - docs = [ - ( - Document( - page_content=result.EmbeddingStore.document, - metadata=result.EmbeddingStore.cmetadata, - ), - result.distance if self.embedding_function is not None else None, - ) - for result in results - ] - return docs - - def _handle_field_filter( - self, - field: str, - value: Any, - ) -> SQLColumnExpression: - """Create a filter for a specific field. - - Args: - field: name of field - value: value to filter - If provided as is then this will be an equality filter - If provided as a dictionary then this will be a filter, the key - will be the operator and the value will be the value to filter by - - Returns: - sqlalchemy expression - """ - if not isinstance(field, str): - raise ValueError( - f"field should be a string but got: {type(field)} with value: {field}" - ) - - if field.startswith("$"): - raise ValueError( - f"Invalid filter condition. Expected a field but got an operator: " - f"{field}" - ) - - # Allow [a-zA-Z0-9_], disallow $ for now until we support escape characters - if not field.isidentifier(): - raise ValueError( - f"Invalid field name: {field}. Expected a valid identifier." 
-            )
-
-        if isinstance(value, dict):
-            # This is a filter specification
-            if len(value) != 1:
-                raise ValueError(
-                    "Invalid filter condition. Expected a value which "
-                    "is a dictionary with a single key that corresponds to an operator "
-                    f"but got a dictionary with {len(value)} keys. The first few "
-                    f"keys are: {list(value.keys())[:3]}"
-                )
-            operator, filter_value = list(value.items())[0]
-            # Verify that the operator is a supported operator
-            if operator not in SUPPORTED_OPERATORS:
-                raise ValueError(
-                    f"Invalid operator: {operator}. "
-                    f"Expected one of {SUPPORTED_OPERATORS}"
-                )
-        else:  # Then we assume an equality operator
-            operator = "$eq"
-            filter_value = value
-
-        if operator in COMPARISONS_TO_NATIVE:
-            # Then we implement a comparison filter
-            # native is trusted input
-            native = COMPARISONS_TO_NATIVE[operator]
-            return func.jsonb_path_match(
-                self.EmbeddingStore.cmetadata,
-                cast(f"$.{field} {native} $value", JSONPATH),
-                cast({"value": filter_value}, JSONB),
-            )
-        elif operator == "$between":
-            # Use AND with two comparisons
-            low, high = filter_value
-
-            lower_bound = func.jsonb_path_match(
-                self.EmbeddingStore.cmetadata,
-                cast(f"$.{field} >= $value", JSONPATH),
-                cast({"value": low}, JSONB),
-            )
-            upper_bound = func.jsonb_path_match(
-                self.EmbeddingStore.cmetadata,
-                cast(f"$.{field} <= $value", JSONPATH),
-                cast({"value": high}, JSONB),
-            )
-            return sqlalchemy.and_(lower_bound, upper_bound)
-        elif operator in {"$in", "$nin", "$like", "$ilike"}:
-            # We force coercion to text here
-            if operator in {"$in", "$nin"}:
-                for val in filter_value:
-                    if not isinstance(val, (str, int, float)):
-                        raise NotImplementedError(
-                            f"Unsupported type: {type(val)} for value: {val}"
-                        )
-
-            queried_field = self.EmbeddingStore.cmetadata[field].astext
-
-            if operator in {"$in"}:
-                return queried_field.in_([str(val) for val in filter_value])
-            elif operator in {"$nin"}:
-                return queried_field.not_in([str(val) for val in filter_value])
-            elif operator in {"$like"}:
-                return queried_field.like(filter_value)
-            elif operator in {"$ilike"}:
-                return queried_field.ilike(filter_value)
-            else:
-                raise NotImplementedError()
-        else:
-            raise NotImplementedError()
-
-    def _create_filter_clause_deprecated(self, key, value):  # type: ignore[no-untyped-def]
-        """Deprecated functionality.
-
-        This is for backwards compatibility with the JSON based schema for metadata.
-        It uses incorrect operator syntax (operators are not prefixed with $).
-
-        This implementation is not efficient, and has bugs associated with
-        the way that it handles numeric filter clauses.
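To make the backwards-compatibility note concrete, here is a hedged sketch contrasting the legacy JSON-schema filter shape (no `$` prefix, handled by this deprecated helper) with the equivalent `$`-prefixed syntax used by the JSONB path; the field name and values are invented for illustration.

```python
# Legacy JSON-schema filter: operator keys without the "$" prefix,
# interpreted per metadata field by _create_filter_clause_deprecated.
legacy_filter = {"year": {"gt": 2020}}

# Equivalent filter in the current JSONB syntax ("$"-prefixed operators),
# handled by _handle_field_filter above.
jsonb_filter = {"year": {"$gt": 2020}}

# Both would be passed the same way (store is a PGVector instance):
# store.similarity_search("some query", filter=legacy_filter)  # use_jsonb=False
# store.similarity_search("some query", filter=jsonb_filter)   # use_jsonb=True
```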
- """ - IN, NIN, BETWEEN, GT, LT, NE = "in", "nin", "between", "gt", "lt", "ne" - EQ, LIKE, CONTAINS, OR, AND = "eq", "like", "contains", "or", "and" - - value_case_insensitive = {k.lower(): v for k, v in value.items()} - if IN in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext.in_( - value_case_insensitive[IN] - ) - elif NIN in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext.not_in( - value_case_insensitive[NIN] - ) - elif BETWEEN in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext.between( - str(value_case_insensitive[BETWEEN][0]), - str(value_case_insensitive[BETWEEN][1]), - ) - elif GT in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext > str( - value_case_insensitive[GT] - ) - elif LT in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext < str( - value_case_insensitive[LT] - ) - elif NE in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext != str( - value_case_insensitive[NE] - ) - elif EQ in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext == str( - value_case_insensitive[EQ] - ) - elif LIKE in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext.like( - value_case_insensitive[LIKE] - ) - elif CONTAINS in map(str.lower, value): - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext.contains( - value_case_insensitive[CONTAINS] - ) - elif OR in map(str.lower, value): - or_clauses = [ - self._create_filter_clause(key, sub_value) - for sub_value in value_case_insensitive[OR] - ] - filter_by_metadata = sqlalchemy.or_(*or_clauses) - elif AND in map(str.lower, value): - and_clauses = [ - self._create_filter_clause(key, sub_value) - for sub_value in value_case_insensitive[AND] - ] - filter_by_metadata = sqlalchemy.and_(*and_clauses) - - else: - filter_by_metadata = None - - return filter_by_metadata - - def _create_filter_clause_json_deprecated( - self, filter: Any - ) -> List[SQLColumnExpression]: - """Convert filters from IR to SQL clauses. - - **DEPRECATED** This functionality will be deprecated in the future. - - It implements translation of filters for a schema that uses JSON - for metadata rather than the JSONB field which is more efficient - for querying. - """ - filter_clauses = [] - for key, value in filter.items(): - if isinstance(value, dict): - filter_by_metadata = self._create_filter_clause_deprecated(key, value) - - if filter_by_metadata is not None: - filter_clauses.append(filter_by_metadata) - else: - filter_by_metadata = self.EmbeddingStore.cmetadata[key].astext == str( - value - ) - filter_clauses.append(filter_by_metadata) - return filter_clauses - - def _create_filter_clause(self, filters: Any) -> Any: - """Convert LangChain IR filter representation to matching SQLAlchemy clauses. - - At the top level, we still don't know if we're working with a field - or an operator for the keys. After we've determined that we can - call the appropriate logic to handle filter creation. - - Args: - filters: Dictionary of filters to apply to the query. - - Returns: - SQLAlchemy clause to apply to the query. 
- """ - if isinstance(filters, dict): - if len(filters) == 1: - # The only operators allowed at the top level are $AND and $OR - # First check if an operator or a field - key, value = list(filters.items())[0] - if key.startswith("$"): - # Then it's an operator - if key.lower() not in ["$and", "$or"]: - raise ValueError( - f"Invalid filter condition. Expected $and or $or " - f"but got: {key}" - ) - else: - # Then it's a field - return self._handle_field_filter(key, filters[key]) - - # Here we handle the $and and $or operators - if not isinstance(value, list): - raise ValueError( - f"Expected a list, but got {type(value)} for value: {value}" - ) - if key.lower() == "$and": - and_ = [self._create_filter_clause(el) for el in value] - if len(and_) > 1: - return sqlalchemy.and_(*and_) - elif len(and_) == 1: - return and_[0] - else: - raise ValueError( - "Invalid filter condition. Expected a dictionary " - "but got an empty dictionary" - ) - elif key.lower() == "$or": - or_ = [self._create_filter_clause(el) for el in value] - if len(or_) > 1: - return sqlalchemy.or_(*or_) - elif len(or_) == 1: - return or_[0] - else: - raise ValueError( - "Invalid filter condition. Expected a dictionary " - "but got an empty dictionary" - ) - else: - raise ValueError( - f"Invalid filter condition. Expected $and or $or " - f"but got: {key}" - ) - elif len(filters) > 1: - # Then all keys have to be fields (they cannot be operators) - for key in filters.keys(): - if key.startswith("$"): - raise ValueError( - f"Invalid filter condition. Expected a field but got: {key}" - ) - # These should all be fields and combined using an $and operator - and_ = [self._handle_field_filter(k, v) for k, v in filters.items()] - if len(and_) > 1: - return sqlalchemy.and_(*and_) - elif len(and_) == 1: - return and_[0] - else: - raise ValueError( - "Invalid filter condition. Expected a dictionary " - "but got an empty dictionary" - ) - else: - raise ValueError("Got an empty dictionary for filters.") - else: - raise ValueError( - f"Invalid type: Expected a dictionary but got type: {type(filters)}" - ) - - def __query_collection( - self, - embedding: List[float], - k: int = 4, - filter: Optional[Dict[str, str]] = None, - ) -> List[Any]: - """Query the collection.""" - with Session(self._bind) as session: # type: ignore[arg-type] - collection = self.get_collection(session) - if not collection: - raise ValueError("Collection not found") - - filter_by = [self.EmbeddingStore.collection_id == collection.uuid] - if filter: - if self.use_jsonb: - filter_clauses = self._create_filter_clause(filter) - if filter_clauses is not None: - filter_by.append(filter_clauses) - else: - # Old way of doing things - filter_clauses = self._create_filter_clause_json_deprecated(filter) - filter_by.extend(filter_clauses) - - _type = self.EmbeddingStore - - results: List[Any] = ( - session.query( - self.EmbeddingStore, - self.distance_strategy(embedding).label("distance"), # type: ignore - ) - .filter(*filter_by) - .order_by(sqlalchemy.asc("distance")) - .join( - self.CollectionStore, - self.EmbeddingStore.collection_id == self.CollectionStore.uuid, - ) - .limit(k) - .all() - ) - - return results - - def similarity_search_by_vector( - self, - embedding: List[float], - k: int = 4, - filter: Optional[dict] = None, - **kwargs: Any, - ) -> List[Document]: - """Return docs most similar to embedding vector. - - Args: - embedding: Embedding to look up documents similar to. - k: Number of Documents to return. Defaults to 4. 
-            filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None.
-
-        Returns:
-            List of Documents most similar to the query vector.
-        """
-        docs_and_scores = self.similarity_search_with_score_by_vector(
-            embedding=embedding, k=k, filter=filter
-        )
-        return _results_to_docs(docs_and_scores)
-
-    @classmethod
-    def from_texts(
-        cls: Type[PGVector],
-        texts: List[str],
-        embedding: Embeddings,
-        metadatas: Optional[List[dict]] = None,
-        collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME,
-        distance_strategy: DistanceStrategy = DEFAULT_DISTANCE_STRATEGY,
-        ids: Optional[List[str]] = None,
-        pre_delete_collection: bool = False,
-        *,
-        use_jsonb: bool = False,
-        **kwargs: Any,
-    ) -> PGVector:
-        """
-        Return VectorStore initialized from texts and embeddings.
-        A Postgres connection string is required: either pass it as a parameter
-        or set the PGVECTOR_CONNECTION_STRING environment variable.
-        """
-        embeddings = embedding.embed_documents(list(texts))
-
-        return cls.__from(
-            texts,
-            embeddings,
-            embedding,
-            metadatas=metadatas,
-            ids=ids,
-            collection_name=collection_name,
-            distance_strategy=distance_strategy,
-            pre_delete_collection=pre_delete_collection,
-            use_jsonb=use_jsonb,
-            **kwargs,
-        )
-
-    @classmethod
-    def from_embeddings(
-        cls,
-        text_embeddings: List[Tuple[str, List[float]]],
-        embedding: Embeddings,
-        metadatas: Optional[List[dict]] = None,
-        collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME,
-        distance_strategy: DistanceStrategy = DEFAULT_DISTANCE_STRATEGY,
-        ids: Optional[List[str]] = None,
-        pre_delete_collection: bool = False,
-        **kwargs: Any,
-    ) -> PGVector:
-        """Construct PGVector wrapper from raw documents and pre-generated
-        embeddings.
-
-        Return VectorStore initialized from documents and embeddings.
-        A Postgres connection string is required: either pass it as a parameter
-        or set the PGVECTOR_CONNECTION_STRING environment variable.
-
-        Example:
-            .. code-block:: python
-
-                from langchain_community.vectorstores import PGVector
-                from langchain_community.embeddings import OpenAIEmbeddings
-                embeddings = OpenAIEmbeddings()
-                text_embeddings = embeddings.embed_documents(texts)
-                text_embedding_pairs = list(zip(texts, text_embeddings))
-                vectorstore = PGVector.from_embeddings(text_embedding_pairs, embeddings)
-        """
-        texts = [t[0] for t in text_embeddings]
-        embeddings = [t[1] for t in text_embeddings]
-
-        return cls.__from(
-            texts,
-            embeddings,
-            embedding,
-            metadatas=metadatas,
-            ids=ids,
-            collection_name=collection_name,
-            distance_strategy=distance_strategy,
-            pre_delete_collection=pre_delete_collection,
-            **kwargs,
-        )
-
-    @classmethod
-    def from_existing_index(
-        cls: Type[PGVector],
-        embedding: Embeddings,
-        collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME,
-        distance_strategy: DistanceStrategy = DEFAULT_DISTANCE_STRATEGY,
-        pre_delete_collection: bool = False,
-        **kwargs: Any,
-    ) -> PGVector:
-        """
-        Get an instance of an existing PGVector store. This method will
-        return the instance of the store without inserting any new
-        embeddings.
-        """
-
-        connection_string = cls.get_connection_string(kwargs)
-
-        store = cls(
-            connection_string=connection_string,
-            collection_name=collection_name,
-            embedding_function=embedding,
-            distance_strategy=distance_strategy,
-            pre_delete_collection=pre_delete_collection,
-        )
-
-        return store
-
-    @classmethod
-    def get_connection_string(cls, kwargs: Dict[str, Any]) -> str:
-        connection_string: str = get_from_dict_or_env(
-            data=kwargs,
-            key="connection_string",
-            env_key="PGVECTOR_CONNECTION_STRING",
-        )
-
-        if not connection_string:
-            raise ValueError(
-                "Postgres connection string is required. "
-                "Either pass it as a parameter "
-                "or set the PGVECTOR_CONNECTION_STRING environment variable."
-            )
-
-        return connection_string
-
-    @classmethod
-    def from_documents(
-        cls: Type[PGVector],
-        documents: List[Document],
-        embedding: Embeddings,
-        collection_name: str = _LANGCHAIN_DEFAULT_COLLECTION_NAME,
-        distance_strategy: DistanceStrategy = DEFAULT_DISTANCE_STRATEGY,
-        ids: Optional[List[str]] = None,
-        pre_delete_collection: bool = False,
-        *,
-        use_jsonb: bool = False,
-        **kwargs: Any,
-    ) -> PGVector:
-        """
-        Return VectorStore initialized from documents and embeddings.
-        A Postgres connection string is required: either pass it as a parameter
-        or set the PGVECTOR_CONNECTION_STRING environment variable.
-        """
-
-        texts = [d.page_content for d in documents]
-        metadatas = [d.metadata for d in documents]
-        connection_string = cls.get_connection_string(kwargs)
-
-        kwargs["connection_string"] = connection_string
-
-        return cls.from_texts(
-            texts=texts,
-            pre_delete_collection=pre_delete_collection,
-            embedding=embedding,
-            distance_strategy=distance_strategy,
-            metadatas=metadatas,
-            ids=ids,
-            collection_name=collection_name,
-            use_jsonb=use_jsonb,
-            **kwargs,
-        )
-
-    @classmethod
-    def connection_string_from_db_params(
-        cls,
-        driver: str,
-        host: str,
-        port: int,
-        database: str,
-        user: str,
-        password: str,
-    ) -> str:
-        """Return connection string from database parameters."""
-        return f"postgresql+{driver}://{user}:{password}@{host}:{port}/{database}"
-
-    def _select_relevance_score_fn(self) -> Callable[[float], float]:
-        """
-        The 'correct' relevance function
-        may differ depending on a few things, including:
-        - the distance / similarity metric used by the VectorStore
-        - the scale of your embeddings (OpenAI's are unit normed. Many others are not!)
-        - embedding dimensionality
-        - etc.
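Because the mapping from raw distance to a normalized relevance score depends on the chosen metric, a brief sketch may help. The connection string and collection name are placeholders, `FakeEmbeddings` stands in for a real embedding model, and the constructor flags mirror those used elsewhere in this file:

```python
# Sketch: pick a distance strategy up front; the matching normalization
# function is then selected by _select_relevance_score_fn above.
from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores.pgvector import DistanceStrategy, PGVector

store = PGVector(
    connection_string="postgresql+psycopg2://user:password@localhost:5432/dbname",
    collection_name="demo",
    embedding_function=FakeEmbeddings(size=16),
    distance_strategy=DistanceStrategy.COSINE,
    use_jsonb=True,
)

# Scores are normalized via the cosine relevance function.
for doc, score in store.similarity_search_with_relevance_scores("cats", k=2):
    print(score, doc.page_content)
```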
- """ - if self.override_relevance_score_fn is not None: - return self.override_relevance_score_fn - - # Default strategy is to rely on distance strategy provided - # in vectorstore constructor - if self._distance_strategy == DistanceStrategy.COSINE: - return self._cosine_relevance_score_fn - elif self._distance_strategy == DistanceStrategy.EUCLIDEAN: - return self._euclidean_relevance_score_fn - elif self._distance_strategy == DistanceStrategy.MAX_INNER_PRODUCT: - return self._max_inner_product_relevance_score_fn - else: - raise ValueError( - "No supported normalization function" - f" for distance_strategy of {self._distance_strategy}." - "Consider providing relevance_score_fn to PGVector constructor." - ) - - def max_marginal_relevance_search_with_score_by_vector( - self, - embedding: List[float], - k: int = 4, - fetch_k: int = 20, - lambda_mult: float = 0.5, - filter: Optional[Dict[str, str]] = None, - **kwargs: Any, - ) -> List[Tuple[Document, float]]: - """Return docs selected using the maximal marginal relevance with score - to embedding vector. - - Maximal marginal relevance optimizes for similarity to query AND diversity - among selected documents. - - Args: - embedding: Embedding to look up documents similar to. - k (int): Number of Documents to return. Defaults to 4. - fetch_k (int): Number of Documents to fetch to pass to MMR algorithm. - Defaults to 20. - lambda_mult (float): Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None. - - Returns: - List[Tuple[Document, float]]: List of Documents selected by maximal marginal - relevance to the query and score for each. - """ - results = self.__query_collection(embedding=embedding, k=fetch_k, filter=filter) - - embedding_list = [result.EmbeddingStore.embedding for result in results] - - mmr_selected = maximal_marginal_relevance( - np.array(embedding, dtype=np.float32), - embedding_list, - k=k, - lambda_mult=lambda_mult, - ) - - candidates = self._results_to_docs_and_scores(results) - - return [r for i, r in enumerate(candidates) if i in mmr_selected] - - def max_marginal_relevance_search( - self, - query: str, - k: int = 4, - fetch_k: int = 20, - lambda_mult: float = 0.5, - filter: Optional[Dict[str, str]] = None, - **kwargs: Any, - ) -> List[Document]: - """Return docs selected using the maximal marginal relevance. - - Maximal marginal relevance optimizes for similarity to query AND diversity - among selected documents. - - Args: - query (str): Text to look up documents similar to. - k (int): Number of Documents to return. Defaults to 4. - fetch_k (int): Number of Documents to fetch to pass to MMR algorithm. - Defaults to 20. - lambda_mult (float): Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None. - - Returns: - List[Document]: List of Documents selected by maximal marginal relevance. 
- """ - embedding = self.embedding_function.embed_query(query) - return self.max_marginal_relevance_search_by_vector( - embedding, - k=k, - fetch_k=fetch_k, - lambda_mult=lambda_mult, - filter=filter, - **kwargs, - ) - - def max_marginal_relevance_search_with_score( - self, - query: str, - k: int = 4, - fetch_k: int = 20, - lambda_mult: float = 0.5, - filter: Optional[dict] = None, - **kwargs: Any, - ) -> List[Tuple[Document, float]]: - """Return docs selected using the maximal marginal relevance with score. - - Maximal marginal relevance optimizes for similarity to query AND diversity - among selected documents. - - Args: - query (str): Text to look up documents similar to. - k (int): Number of Documents to return. Defaults to 4. - fetch_k (int): Number of Documents to fetch to pass to MMR algorithm. - Defaults to 20. - lambda_mult (float): Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None. - - Returns: - List[Tuple[Document, float]]: List of Documents selected by maximal marginal - relevance to the query and score for each. - """ - embedding = self.embedding_function.embed_query(query) - docs = self.max_marginal_relevance_search_with_score_by_vector( - embedding=embedding, - k=k, - fetch_k=fetch_k, - lambda_mult=lambda_mult, - filter=filter, - **kwargs, - ) - return docs - - def max_marginal_relevance_search_by_vector( - self, - embedding: List[float], - k: int = 4, - fetch_k: int = 20, - lambda_mult: float = 0.5, - filter: Optional[Dict[str, str]] = None, - **kwargs: Any, - ) -> List[Document]: - """Return docs selected using the maximal marginal relevance - to embedding vector. - - Maximal marginal relevance optimizes for similarity to query AND diversity - among selected documents. - - Args: - embedding (str): Text to look up documents similar to. - k (int): Number of Documents to return. Defaults to 4. - fetch_k (int): Number of Documents to fetch to pass to MMR algorithm. - Defaults to 20. - lambda_mult (float): Number between 0 and 1 that determines the degree - of diversity among the results with 0 corresponding - to maximum diversity and 1 to minimum diversity. - Defaults to 0.5. - filter (Optional[Dict[str, str]]): Filter by metadata. Defaults to None. - - Returns: - List[Document]: List of Documents selected by maximal marginal relevance. - """ - docs_and_scores = self.max_marginal_relevance_search_with_score_by_vector( - embedding, - k=k, - fetch_k=fetch_k, - lambda_mult=lambda_mult, - filter=filter, - **kwargs, - ) - - return _results_to_docs(docs_and_scores) - - async def amax_marginal_relevance_search_by_vector( - self, - embedding: List[float], - k: int = 4, - fetch_k: int = 20, - lambda_mult: float = 0.5, - filter: Optional[Dict[str, str]] = None, - **kwargs: Any, - ) -> List[Document]: - """Return docs selected using the maximal marginal relevance.""" - - # This is a temporary workaround to make the similarity search - # asynchronous. The proper solution is to make the similarity search - # asynchronous in the vector store implementations. 
- return await run_in_executor( - None, - self.max_marginal_relevance_search_by_vector, - embedding, - k=k, - fetch_k=fetch_k, - lambda_mult=lambda_mult, - filter=filter, - **kwargs, - ) diff --git a/libs/partners/postgres/poetry.lock b/libs/partners/postgres/poetry.lock deleted file mode 100644 index 8508a0b4ae..0000000000 --- a/libs/partners/postgres/poetry.lock +++ /dev/null @@ -1,987 +0,0 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. - -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "codespell" -version = "2.2.6" -description = "Codespell" -optional = false -python-versions = ">=3.8" -files = [ - {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, - {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, -] - -[package.extras] -dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] -hard-encoding-detection = ["chardet"] -toml = ["tomli"] -types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - 
-[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] - -[package.dependencies] -jsonpointer = ">=1.9" - -[[package]] -name = "jsonpointer" -version = "2.4" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, -] - -[[package]] -name = "langchain-core" -version = "0.1.40" -description = "Building applications with LLMs through composability" -optional = false -python-versions = ">=3.8.1,<4.0" -files = [] -develop = true - -[package.dependencies] -jsonpatch = "^1.33" -langsmith = "^0.1.0" -packaging = "^23.2" -pydantic = ">=1,<3" -PyYAML = ">=5.3" -tenacity = "^8.1.0" - -[package.extras] -extended-testing = ["jinja2 (>=3,<4)"] - -[package.source] -type = "directory" -url = "../../core" - -[[package]] -name = "langgraph" -version = "0.0.32" -description = "langgraph" -optional = false -python-versions = "<4.0,>=3.9.0" -files = [ - {file = "langgraph-0.0.32-py3-none-any.whl", hash = "sha256:b9330b75b420f6fc0b8b238c3dd974166e4e779fd11b6c73c58754db14644cb5"}, - {file = "langgraph-0.0.32.tar.gz", hash = "sha256:28338cc525ae82b240de89bffec1bae412fedb4edb6267de5c7f944c47ea8263"}, -] - -[package.dependencies] -langchain-core = ">=0.1.38,<0.2.0" - -[[package]] -name = "langsmith" -version = "0.1.40" -description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
-optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langsmith-0.1.40-py3-none-any.whl", hash = "sha256:aa47d0f5a1eabd5c05ac6ce2cd3e28ccfc554d366e856a27b7c3c17c443881cb"}, - {file = "langsmith-0.1.40.tar.gz", hash = "sha256:50fdf313741cf94e978de06025fd180b56acf1d1a4549b0fd5453ef23d5461ef"}, -] - -[package.dependencies] -orjson = ">=3.9.14,<4.0.0" -pydantic = ">=1,<3" -requests = ">=2,<3" - -[[package]] -name = "mypy" -version = "1.9.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = 
"sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "orjson" -version = "3.10.0" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"}, - {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"}, - {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"}, - {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"}, - {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"}, - {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"}, - {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"}, - {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"}, - {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"}, - {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"}, - {file = 
"orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"}, - {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"}, - {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"}, - {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"}, - {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"}, - {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"}, - {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"}, - {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"}, - {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"}, - {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"}, - {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"}, - {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"}, - {file = 
"orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"}, - {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"}, - {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"}, - {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"}, - {file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"}, - {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"}, - {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pgvector" -version = "0.2.5" -description = "pgvector support for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"}, -] - -[package.dependencies] -numpy = "*" - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "psycopg" -version = "3.1.18" -description = "PostgreSQL database adapter for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg-3.1.18-py3-none-any.whl", hash = "sha256:4d5a0a5a8590906daa58ebd5f3cfc34091377354a1acced269dd10faf55da60e"}, - {file = "psycopg-3.1.18.tar.gz", hash = "sha256:31144d3fb4c17d78094d9e579826f047d4af1da6a10427d91dfcfb6ecdf6f12b"}, -] - -[package.dependencies] -typing-extensions = ">=4.1" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -binary = ["psycopg-binary (==3.1.18)"] -c = ["psycopg-c (==3.1.18)"] -dev = ["black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] -docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] -pool = ["psycopg-pool"] -test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", 
"pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] - -[[package]] -name = "psycopg-pool" -version = "3.2.1" -description = "Connection Pool for Psycopg" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg-pool-3.2.1.tar.gz", hash = "sha256:6509a75c073590952915eddbba7ce8b8332a440a31e77bba69561483492829ad"}, - {file = "psycopg_pool-3.2.1-py3-none-any.whl", hash = "sha256:060b551d1b97a8d358c668be58b637780b884de14d861f4f5ecc48b7563aafb7"}, -] - -[package.dependencies] -typing-extensions = ">=4.4" - -[[package]] -name = "pydantic" -version = "2.6.4" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, - {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.16.3" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = 
"pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = 
"sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pytest" -version = 
"7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.23.6" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, - {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, -] - -[package.dependencies] -pytest = ">=7.0.0,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "ruff" -version = "0.1.15" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, - {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, - {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, - {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, - {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.29" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, - 
{file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, - {file = 
"SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, - {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, - {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] 
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "tenacity" -version = "8.2.3" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, - {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, -] - -[package.extras] -doc = ["reno", "sphinx", "tornado (>=4.5)"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "typing-extensions" -version = "4.11.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "urllib3" -version = "2.2.1" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "02a20cf8f1209824252361c78bffcdfa960bf92ef3214807cc9f494eb533b7e4" diff --git a/libs/partners/postgres/pyproject.toml b/libs/partners/postgres/pyproject.toml deleted file mode 100644 index 25be0deb99..0000000000 --- a/libs/partners/postgres/pyproject.toml +++ /dev/null @@ -1,94 +0,0 @@ -[tool.poetry] -name = "langchain-postgres" -version = "0.0.1" -description = "An integration package connecting Postgres and LangChain" -authors = [] -readme = "README.md" -repository = "https://github.com/langchain-ai/langchain" -license = "MIT" - -[tool.poetry.urls] -"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/postgres" - -[tool.poetry.dependencies] -python = "^3.9" -langchain-core = "^0.1" -psycopg = "^3.1.18" -langgraph = "^0.0.32" -psycopg-pool = "^3.2.1" -sqlalchemy = "^2.0.29" -pgvector = "^0.2.5" -numpy = "^1.26.4" - -[tool.poetry.group.test] -optional = true - -[tool.poetry.group.test.dependencies] -pytest = "^7.4.3" -pytest-asyncio = "^0.23.2" -langchain-core = {path = "../../core", develop = true} - -[tool.poetry.group.codespell] -optional = true - -[tool.poetry.group.codespell.dependencies] -codespell = "^2.2.6" - -[tool.poetry.group.test_integration] -optional = true - -[tool.poetry.group.test_integration.dependencies] - -[tool.poetry.group.lint] -optional = true - -[tool.poetry.group.lint.dependencies] -ruff = "^0.1.8" - -[tool.poetry.group.typing.dependencies] -mypy = "^1.7.1" -langchain-core = {path = "../../core", develop = true} - -[tool.poetry.group.dev] -optional = true - -[tool.poetry.group.dev.dependencies] -langchain-core = {path = "../../core", develop = true} - -[tool.ruff.lint] -select = [ - "E", # pycodestyle - "F", # pyflakes - "I", # isort - "T201", # print -] - -[tool.mypy] -disallow_untyped_defs = "True" - -[tool.coverage.run] -omit = [ - "tests/*", -] - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" - -[tool.pytest.ini_options] -# --strict-markers will raise errors on unknown marks. -# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks -# -# https://docs.pytest.org/en/7.1.x/reference/reference.html -# --strict-config any warnings encountered while parsing the `pytest` -# section of the configuration file raise errors. -# -# https://github.com/tophat/syrupy -# --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite. -addopts = "--strict-markers --strict-config --durations=5" -# Registering custom markers. 
-# https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers -markers = [ - "compile: mark placeholder test used to compile integration tests without running them", -] -asyncio_mode = "auto" diff --git a/libs/partners/postgres/scripts/check_imports.py b/libs/partners/postgres/scripts/check_imports.py deleted file mode 100644 index 365f5fa118..0000000000 --- a/libs/partners/postgres/scripts/check_imports.py +++ /dev/null @@ -1,17 +0,0 @@ -import sys -import traceback -from importlib.machinery import SourceFileLoader - -if __name__ == "__main__": - files = sys.argv[1:] - has_failure = False - for file in files: - try: - SourceFileLoader("x", file).load_module() - except Exception: - has_failure = True - print(file) # noqa: T201 - traceback.print_exc() - print() # noqa: T201 - - sys.exit(1 if has_failure else 0) diff --git a/libs/partners/postgres/scripts/check_pydantic.sh b/libs/partners/postgres/scripts/check_pydantic.sh deleted file mode 100755 index 06b5bb81ae..0000000000 --- a/libs/partners/postgres/scripts/check_pydantic.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# This script searches for lines starting with "import pydantic" or "from pydantic" -# in tracked files within a Git repository. -# -# Usage: ./scripts/check_pydantic.sh /path/to/repository - -# Check if a path argument is provided -if [ $# -ne 1 ]; then - echo "Usage: $0 /path/to/repository" - exit 1 -fi - -repository_path="$1" - -# Search for lines matching the pattern within the specified repository -result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic') - -# Check if any matching lines were found -if [ -n "$result" ]; then - echo "ERROR: The following lines need to be updated:" - echo "$result" - echo "Please replace the code with an import from langchain_core.pydantic_v1." - echo "For example, replace 'from pydantic import BaseModel'" - echo "with 'from langchain_core.pydantic_v1 import BaseModel'" - exit 1 -fi diff --git a/libs/partners/postgres/scripts/lint_imports.sh b/libs/partners/postgres/scripts/lint_imports.sh deleted file mode 100755 index 19ccec1480..0000000000 --- a/libs/partners/postgres/scripts/lint_imports.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -set -eu - -# Initialize a variable to keep track of errors -errors=0 - -# make sure not importing from langchain, langchain_experimental, or langchain_community -git --no-pager grep '^from langchain\.' . && errors=$((errors+1)) -git --no-pager grep '^from langchain_experimental\.' . && errors=$((errors+1)) -git --no-pager grep '^from langchain_community\.' .
&& errors=$((errors+1)) - -# Decide on an exit status based on the errors -if [ "$errors" -gt 0 ]; then - exit 1 -else - exit 0 -fi diff --git a/libs/partners/postgres/tests/__init__.py b/libs/partners/postgres/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/libs/partners/postgres/tests/integration_tests/__init__.py b/libs/partners/postgres/tests/integration_tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/libs/partners/postgres/tests/integration_tests/fake_embeddings.py b/libs/partners/postgres/tests/integration_tests/fake_embeddings.py deleted file mode 100644 index 81fd2aa5ae..0000000000 --- a/libs/partners/postgres/tests/integration_tests/fake_embeddings.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Copied from community.""" -from typing import List - -from langchain_core.embeddings import Embeddings - -fake_texts = ["foo", "bar", "baz"] - - -class FakeEmbeddings(Embeddings): - """Fake embeddings functionality for testing.""" - - def embed_documents(self, texts: List[str]) -> List[List[float]]: - """Return simple embeddings. - Embeddings encode each text as its index.""" - return [[float(1.0)] * 9 + [float(i)] for i in range(len(texts))] - - async def aembed_documents(self, texts: List[str]) -> List[List[float]]: - return self.embed_documents(texts) - - def embed_query(self, text: str) -> List[float]: - """Return constant query embeddings. - Embeddings are identical to embed_documents(texts)[0]. - Distance to each text will be that text's index, - as it was passed to embed_documents.""" - return [float(1.0)] * 9 + [float(0.0)] - - async def aembed_query(self, text: str) -> List[float]: - return self.embed_query(text) diff --git a/libs/partners/postgres/tests/integration_tests/fixtures/__init__.py b/libs/partners/postgres/tests/integration_tests/fixtures/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/libs/partners/postgres/tests/integration_tests/fixtures/filtering_test_cases.py b/libs/partners/postgres/tests/integration_tests/fixtures/filtering_test_cases.py deleted file mode 100644 index 9dcca44f56..0000000000 --- a/libs/partners/postgres/tests/integration_tests/fixtures/filtering_test_cases.py +++ /dev/null @@ -1,218 +0,0 @@ -"""Module needs to move to a stasndalone package.""" -from langchain_core.documents import Document - -metadatas = [ - { - "name": "adam", - "date": "2021-01-01", - "count": 1, - "is_active": True, - "tags": ["a", "b"], - "location": [1.0, 2.0], - "id": 1, - "height": 10.0, # Float column - "happiness": 0.9, # Float column - "sadness": 0.1, # Float column - }, - { - "name": "bob", - "date": "2021-01-02", - "count": 2, - "is_active": False, - "tags": ["b", "c"], - "location": [2.0, 3.0], - "id": 2, - "height": 5.7, # Float column - "happiness": 0.8, # Float column - "sadness": 0.1, # Float column - }, - { - "name": "jane", - "date": "2021-01-01", - "count": 3, - "is_active": True, - "tags": ["b", "d"], - "location": [3.0, 4.0], - "id": 3, - "height": 2.4, # Float column - "happiness": None, - # Sadness missing intentionally - }, -] -texts = ["id {id}".format(id=metadata["id"]) for metadata in metadatas] - -DOCUMENTS = [ - Document(page_content=text, metadata=metadata) - for text, metadata in zip(texts, metadatas) -] - - -TYPE_1_FILTERING_TEST_CASES = [ - # These tests only involve equality checks - ( - {"id": 1}, - [1], - ), - # String field - ( - # check name - {"name": "adam"}, - [1], - ), - # Boolean fields - ( - {"is_active": True}, - [1, 3], - ), - ( - 
{"is_active": False}, - [2], - ), - # And semantics for top level filtering - ( - {"id": 1, "is_active": True}, - [1], - ), - ( - {"id": 1, "is_active": False}, - [], - ), -] - -TYPE_2_FILTERING_TEST_CASES = [ - # These involve equality checks and other operators - # like $ne, $gt, $gte, $lt, $lte, $not - ( - {"id": 1}, - [1], - ), - ( - {"id": {"$ne": 1}}, - [2, 3], - ), - ( - {"id": {"$gt": 1}}, - [2, 3], - ), - ( - {"id": {"$gte": 1}}, - [1, 2, 3], - ), - ( - {"id": {"$lt": 1}}, - [], - ), - ( - {"id": {"$lte": 1}}, - [1], - ), - # Repeat all the same tests with name (string column) - ( - {"name": "adam"}, - [1], - ), - ( - {"name": "bob"}, - [2], - ), - ( - {"name": {"$eq": "adam"}}, - [1], - ), - ( - {"name": {"$ne": "adam"}}, - [2, 3], - ), - # And also gt, gte, lt, lte relying on lexicographical ordering - ( - {"name": {"$gt": "jane"}}, - [], - ), - ( - {"name": {"$gte": "jane"}}, - [3], - ), - ( - {"name": {"$lt": "jane"}}, - [1, 2], - ), - ( - {"name": {"$lte": "jane"}}, - [1, 2, 3], - ), - ( - {"is_active": {"$eq": True}}, - [1, 3], - ), - ( - {"is_active": {"$ne": True}}, - [2], - ), - # Test float column. - ( - {"height": {"$gt": 5.0}}, - [1, 2], - ), - ( - {"height": {"$gte": 5.0}}, - [1, 2], - ), - ( - {"height": {"$lt": 5.0}}, - [3], - ), - ( - {"height": {"$lte": 5.8}}, - [2, 3], - ), -] - -TYPE_3_FILTERING_TEST_CASES = [ - # These involve usage of AND and OR operators - ( - {"$or": [{"id": 1}, {"id": 2}]}, - [1, 2], - ), - ( - {"$or": [{"id": 1}, {"name": "bob"}]}, - [1, 2], - ), - ( - {"$and": [{"id": 1}, {"id": 2}]}, - [], - ), - ( - {"$or": [{"id": 1}, {"id": 2}, {"id": 3}]}, - [1, 2, 3], - ), -] - -TYPE_4_FILTERING_TEST_CASES = [ - # These involve special operators like $in, $nin, $between - # Test between - ( - {"id": {"$between": (1, 2)}}, - [1, 2], - ), - ( - {"id": {"$between": (1, 1)}}, - [1], - ), - ( - {"name": {"$in": ["adam", "bob"]}}, - [1, 2], - ), -] - -TYPE_5_FILTERING_TEST_CASES = [ - # These involve special operators like $like, $ilike that - # may be specified to certain databases. 
- ( - {"name": {"$like": "a%"}}, - [1], - ), - ( - {"name": {"$like": "%a%"}}, # adam and jane - [1, 3], - ), -] diff --git a/libs/partners/postgres/tests/integration_tests/test_chat_histories.py b/libs/partners/postgres/tests/integration_tests/test_chat_histories.py deleted file mode 100644 index 187ec2a0f6..0000000000 --- a/libs/partners/postgres/tests/integration_tests/test_chat_histories.py +++ /dev/null @@ -1,123 +0,0 @@ -import uuid - -from langchain_core.messages import AIMessage, HumanMessage, SystemMessage - -from langchain_postgres.chat_message_histories import PostgresChatMessageHistory -from tests.utils import asyncpg_client, syncpg_client - - -def test_sync_chat_history() -> None: - table_name = "chat_history" - session_id = str(uuid.UUID(int=123)) - with syncpg_client() as sync_connection: - PostgresChatMessageHistory.drop_table(sync_connection, table_name) - PostgresChatMessageHistory.create_schema(sync_connection, table_name) - - chat_history = PostgresChatMessageHistory( - table_name, session_id, sync_connection=sync_connection - ) - - messages = chat_history.messages - assert messages == [] - - assert chat_history is not None - - # Get messages from the chat history - messages = chat_history.messages - assert messages == [] - - chat_history.add_messages( - [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - ) - - # Get messages from the chat history - messages = chat_history.messages - assert len(messages) == 3 - assert messages == [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - - chat_history.add_messages( - [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - ) - - messages = chat_history.messages - assert len(messages) == 6 - assert messages == [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - - chat_history.clear() - assert chat_history.messages == [] - - -async def test_async_chat_history() -> None: - """Test the async chat history.""" - async with asyncpg_client() as async_connection: - table_name = "chat_history" - session_id = str(uuid.UUID(int=125)) - await PostgresChatMessageHistory.adrop_table(async_connection, table_name) - await PostgresChatMessageHistory.acreate_schema(async_connection, table_name) - - chat_history = PostgresChatMessageHistory( - table_name, session_id, async_connection=async_connection - ) - - messages = await chat_history.aget_messages() - assert messages == [] - - # Add messages - await chat_history.aadd_messages( - [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - ) - # Get the messages - messages = await chat_history.aget_messages() - assert len(messages) == 3 - assert messages == [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - - # Add more messages - await chat_history.aadd_messages( - [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - ) - # Get the messages - messages = await chat_history.aget_messages() - assert len(messages) == 6 - assert messages == [ - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - SystemMessage(content="Meow"), - AIMessage(content="woof"), - HumanMessage(content="bark"), - ] - - # clear - await chat_history.aclear() - 
assert await chat_history.aget_messages() == []
diff --git a/libs/partners/postgres/tests/integration_tests/test_checkpointer.py b/libs/partners/postgres/tests/integration_tests/test_checkpointer.py
deleted file mode 100644
index 1179d8b8f7..0000000000
--- a/libs/partners/postgres/tests/integration_tests/test_checkpointer.py
+++ /dev/null
@@ -1,326 +0,0 @@
-from collections import defaultdict
-
-from langgraph.checkpoint import Checkpoint
-from langgraph.checkpoint.base import CheckpointTuple
-
-from langchain_postgres.checkpoint import PickleCheckpointSerializer, PostgresCheckpoint
-from tests.utils import asyncpg_client, syncpg_client
-
-
-async def test_async_checkpoint() -> None:
-    """Test the async checkpointer."""
-    async with asyncpg_client() as async_connection:
-        await PostgresCheckpoint.adrop_schema(async_connection)
-        await PostgresCheckpoint.acreate_schema(async_connection)
-        checkpoint_saver = PostgresCheckpoint(
-            async_connection=async_connection, serializer=PickleCheckpointSerializer()
-        )
-        checkpoint_tuple = [
-            c
-            async for c in checkpoint_saver.alist(
-                {
-                    "configurable": {
-                        "thread_id": "test_thread",
-                    }
-                }
-            )
-        ]
-        assert len(checkpoint_tuple) == 0
-
-        # Add a checkpoint
-        sample_checkpoint: Checkpoint = {
-            "v": 1,
-            "ts": "2021-09-01T00:00:00+00:00",
-            "channel_values": {},
-            "channel_versions": defaultdict(),
-            "versions_seen": defaultdict(),
-        }
-
-        await checkpoint_saver.aput(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                }
-            },
-            sample_checkpoint,
-        )
-
-        checkpoints = [
-            c
-            async for c in checkpoint_saver.alist(
-                {
-                    "configurable": {
-                        "thread_id": "test_thread",
-                    }
-                }
-            )
-        ]
-
-        assert len(checkpoints) == 1
-        assert checkpoints[0].checkpoint == sample_checkpoint
-
-        # Add another checkpoint
-        sample_checkpoint2: Checkpoint = {
-            "v": 1,
-            "ts": "2021-09-02T00:00:00+00:00",
-            "channel_values": {},
-            "channel_versions": defaultdict(),
-            "versions_seen": defaultdict(),
-        }
-
-        await checkpoint_saver.aput(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                }
-            },
-            sample_checkpoint2,
-        )
-
-        # Try aget
-        checkpoints = [
-            c
-            async for c in checkpoint_saver.alist(
-                {
-                    "configurable": {
-                        "thread_id": "test_thread",
-                    }
-                }
-            )
-        ]
-
-        assert len(checkpoints) == 2
-        # Should be sorted by timestamp desc
-        assert checkpoints[0].checkpoint == sample_checkpoint2
-        assert checkpoints[1].checkpoint == sample_checkpoint
-
-        assert await checkpoint_saver.aget_tuple(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                }
-            }
-        ) == CheckpointTuple(
-            config={
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-02T00:00:00+00:00",
-                }
-            },
-            checkpoint={
-                "v": 1,
-                "ts": "2021-09-02T00:00:00+00:00",
-                "channel_values": {},
-                "channel_versions": {},  # type: ignore
-                "versions_seen": {},  # type: ignore
-            },
-            parent_config=None,
-        )
-
-        # Check aget_tuple with thread_ts
-        assert await checkpoint_saver.aget_tuple(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-01T00:00:00+00:00",
-                }
-            }
-        ) == CheckpointTuple(
-            config={
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-01T00:00:00+00:00",
-                }
-            },
-            checkpoint={
-                "v": 1,
-                "ts": "2021-09-01T00:00:00+00:00",
-                "channel_values": {},
-                "channel_versions": {},  # type: ignore
-                "versions_seen": {},  # type: ignore
-            },
-            parent_config=None,
-        )
-
-
-def test_sync_checkpoint() -> None:
-    """Test the sync checkpoint implementation."""
-    with syncpg_client() as sync_connection:
-        PostgresCheckpoint.drop_schema(sync_connection)
-        PostgresCheckpoint.create_schema(sync_connection)
-        checkpoint_saver = PostgresCheckpoint(
-            sync_connection=sync_connection, serializer=PickleCheckpointSerializer()
-        )
-        checkpoint_tuple = [
-            c
-            for c in checkpoint_saver.list(
-                {
-                    "configurable": {
-                        "thread_id": "test_thread",
-                    }
-                }
-            )
-        ]
-        assert len(checkpoint_tuple) == 0
-
-        # Add a checkpoint
-        sample_checkpoint: Checkpoint = {
-            "v": 1,
-            "ts": "2021-09-01T00:00:00+00:00",
-            "channel_values": {},
-            "channel_versions": defaultdict(),
-            "versions_seen": defaultdict(),
-        }
-
-        checkpoint_saver.put(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                }
-            },
-            sample_checkpoint,
-        )
-
-        checkpoints = [
-            c
-            for c in checkpoint_saver.list(
-                {
-                    "configurable": {
-                        "thread_id": "test_thread",
-                    }
-                }
-            )
-        ]
-
-        assert len(checkpoints) == 1
-        assert checkpoints[0].checkpoint == sample_checkpoint
-
-        # Add another checkpoint
-        sample_checkpoint_2: Checkpoint = {
-            "v": 1,
-            "ts": "2021-09-02T00:00:00+00:00",
-            "channel_values": {},
-            "channel_versions": defaultdict(),
-            "versions_seen": defaultdict(),
-        }
-
-        checkpoint_saver.put(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                }
-            },
-            sample_checkpoint_2,
-        )
-
-        # Try get
-        checkpoints = [
-            c
-            for c in checkpoint_saver.list(
-                {
-                    "configurable": {
-                        "thread_id": "test_thread",
-                    }
-                }
-            )
-        ]
-
-        assert len(checkpoints) == 2
-        # Should be sorted by timestamp desc
-        assert checkpoints[0].checkpoint == sample_checkpoint_2
-        assert checkpoints[1].checkpoint == sample_checkpoint
-
-        assert checkpoint_saver.get_tuple(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                }
-            }
-        ) == CheckpointTuple(
-            config={
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-02T00:00:00+00:00",
-                }
-            },
-            checkpoint={
-                "v": 1,
-                "ts": "2021-09-02T00:00:00+00:00",
-                "channel_values": {},
-                "channel_versions": defaultdict(),
-                "versions_seen": defaultdict(),
-            },
-            parent_config=None,
-        )
-
-
-async def test_on_conflict_aput() -> None:
-    async with asyncpg_client() as async_connection:
-        await PostgresCheckpoint.adrop_schema(async_connection)
-        await PostgresCheckpoint.acreate_schema(async_connection)
-        checkpoint_saver = PostgresCheckpoint(
-            async_connection=async_connection, serializer=PickleCheckpointSerializer()
-        )
-
-        # Calling aput twice on the same (thread_id, thread_ts) should not raise any error
-        sample_checkpoint: Checkpoint = {
-            "v": 1,
-            "ts": "2021-09-01T00:00:00+00:00",
-            "channel_values": {},
-            "channel_versions": defaultdict(),
-            "versions_seen": defaultdict(),
-        }
-        new_checkpoint: Checkpoint = {
-            "v": 2,
-            "ts": "2021-09-01T00:00:00+00:00",
-            "channel_values": {},
-            "channel_versions": defaultdict(),
-            "versions_seen": defaultdict(),
-        }
-        await checkpoint_saver.aput(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-01T00:00:00+00:00",
-                }
-            },
-            sample_checkpoint,
-        )
-        await checkpoint_saver.aput(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-01T00:00:00+00:00",
-                }
-            },
-            new_checkpoint,
-        )
-        # Check aget_tuple with thread_ts
-        assert await checkpoint_saver.aget_tuple(
-            {
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-01T00:00:00+00:00",
-                }
-            }
-        ) == CheckpointTuple(
-            config={
-                "configurable": {
-                    "thread_id": "test_thread",
-                    "thread_ts": "2021-09-01T00:00:00+00:00",
-                }
-            },
-            checkpoint={
-                "v": 2,
-                "ts": "2021-09-01T00:00:00+00:00",
-                "channel_values": {},
-                "channel_versions": 
defaultdict(None, {}), - "versions_seen": defaultdict(None, {}), - }, - parent_config={ - "configurable": { - "thread_id": "test_thread", - "thread_ts": "2021-09-01T00:00:00+00:00", - } - }, - ) diff --git a/libs/partners/postgres/tests/integration_tests/test_compile.py b/libs/partners/postgres/tests/integration_tests/test_compile.py deleted file mode 100644 index 33ecccdfa0..0000000000 --- a/libs/partners/postgres/tests/integration_tests/test_compile.py +++ /dev/null @@ -1,7 +0,0 @@ -import pytest - - -@pytest.mark.compile -def test_placeholder() -> None: - """Used for compiling integration tests without running any real tests.""" - pass diff --git a/libs/partners/postgres/tests/integration_tests/test_vectorstore.py b/libs/partners/postgres/tests/integration_tests/test_vectorstore.py deleted file mode 100644 index 2a89103d35..0000000000 --- a/libs/partners/postgres/tests/integration_tests/test_vectorstore.py +++ /dev/null @@ -1,505 +0,0 @@ -"""Test PGVector functionality.""" - -import os -from typing import Any, Dict, Generator, List - -import pytest -import sqlalchemy -from langchain_core.documents import Document -from sqlalchemy.orm import Session - -from langchain_postgres.vectorstores import ( - SUPPORTED_OPERATORS, - PGVector, -) -from tests.integration_tests.fake_embeddings import FakeEmbeddings -from tests.integration_tests.fixtures.filtering_test_cases import ( - DOCUMENTS, - TYPE_1_FILTERING_TEST_CASES, - TYPE_2_FILTERING_TEST_CASES, - TYPE_3_FILTERING_TEST_CASES, - TYPE_4_FILTERING_TEST_CASES, - TYPE_5_FILTERING_TEST_CASES, -) - -# The connection string matches the default settings in the docker-compose file -# located in the root of the repository: [root]/docker/docker-compose.yml -# Non-standard ports are used to avoid conflicts with other local postgres -# instances. 
-# To spin up postgres with the pgvector extension:
-# cd [root]/docker
-# docker compose up pgvector
-CONNECTION_STRING = PGVector.connection_string_from_db_params(
-    driver=os.environ.get("TEST_PGVECTOR_DRIVER", "psycopg"),
-    host=os.environ.get("TEST_PGVECTOR_HOST", "localhost"),
-    port=int(os.environ.get("TEST_PGVECTOR_PORT", "6024")),
-    database=os.environ.get("TEST_PGVECTOR_DATABASE", "langchain"),
-    user=os.environ.get("TEST_PGVECTOR_USER", "langchain"),
-    password=os.environ.get("TEST_PGVECTOR_PASSWORD", "langchain"),
-)
-
-ADA_TOKEN_COUNT = 1536
-
-
-class FakeEmbeddingsWithAdaDimension(FakeEmbeddings):
-    """Fake embeddings functionality for testing."""
-
-    def embed_documents(self, texts: List[str]) -> List[List[float]]:
-        """Return simple embeddings."""
-        return [
-            [float(1.0)] * (ADA_TOKEN_COUNT - 1) + [float(i)] for i in range(len(texts))
-        ]
-
-    def embed_query(self, text: str) -> List[float]:
-        """Return simple embeddings."""
-        return [float(1.0)] * (ADA_TOKEN_COUNT - 1) + [float(0.0)]
-
-
-def test_pgvector(pgvector: PGVector) -> None:
-    """Test end to end construction and search."""
-    texts = ["foo", "bar", "baz"]
-    docsearch = PGVector.from_texts(
-        texts=texts,
-        collection_name="test_collection",
-        embedding=FakeEmbeddingsWithAdaDimension(),
-        connection_string=CONNECTION_STRING,
-        pre_delete_collection=True,
-    )
-    output = docsearch.similarity_search("foo", k=1)
-    assert output == [Document(page_content="foo")]
-
-
-def test_pgvector_embeddings() -> None:
-    """Test end to end construction with embeddings and search."""
-    texts = ["foo", "bar", "baz"]
-    text_embeddings = FakeEmbeddingsWithAdaDimension().embed_documents(texts)
-    text_embedding_pairs = list(zip(texts, text_embeddings))
-    docsearch = PGVector.from_embeddings(
-        text_embeddings=text_embedding_pairs,
-        collection_name="test_collection",
-        embedding=FakeEmbeddingsWithAdaDimension(),
-        connection_string=CONNECTION_STRING,
-        pre_delete_collection=True,
-    )
-    output = docsearch.similarity_search("foo", k=1)
-    assert output == [Document(page_content="foo")]
-
-
-def test_pgvector_with_metadatas() -> None:
-    """Test end to end construction and search."""
-    texts = ["foo", "bar", "baz"]
-    metadatas = [{"page": str(i)} for i in range(len(texts))]
-    docsearch = PGVector.from_texts(
-        texts=texts,
-        collection_name="test_collection",
-        embedding=FakeEmbeddingsWithAdaDimension(),
-        metadatas=metadatas,
-        connection_string=CONNECTION_STRING,
-        pre_delete_collection=True,
-    )
-    output = docsearch.similarity_search("foo", k=1)
-    assert output == [Document(page_content="foo", metadata={"page": "0"})]
-
-
-def test_pgvector_with_metadatas_with_scores() -> None:
-    """Test end to end construction and search."""
-    texts = ["foo", "bar", "baz"]
-    metadatas = [{"page": str(i)} for i in range(len(texts))]
-    docsearch = PGVector.from_texts(
-        texts=texts,
-        collection_name="test_collection",
-        embedding=FakeEmbeddingsWithAdaDimension(),
-        metadatas=metadatas,
-        connection_string=CONNECTION_STRING,
-        pre_delete_collection=True,
-    )
-    output = docsearch.similarity_search_with_score("foo", k=1)
-    assert output == [(Document(page_content="foo", metadata={"page": "0"}), 0.0)]
-
-
-def test_pgvector_with_filter_match() -> None:
-    """Test end to end construction and search."""
-    texts = ["foo", "bar", "baz"]
-    metadatas = [{"page": str(i)} for i in range(len(texts))]
-    docsearch = PGVector.from_texts(
-        texts=texts,
-        collection_name="test_collection_filter",
-        embedding=FakeEmbeddingsWithAdaDimension(),
-        
metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - output = docsearch.similarity_search_with_score("foo", k=1, filter={"page": "0"}) - assert output == [(Document(page_content="foo", metadata={"page": "0"}), 0.0)] - - -def test_pgvector_with_filter_distant_match() -> None: - """Test end to end construction and search.""" - texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection_filter", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - output = docsearch.similarity_search_with_score("foo", k=1, filter={"page": "2"}) - assert output == [ - (Document(page_content="baz", metadata={"page": "2"}), 0.0013003906671379406) - ] - - -def test_pgvector_with_filter_no_match() -> None: - """Test end to end construction and search.""" - texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection_filter", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - output = docsearch.similarity_search_with_score("foo", k=1, filter={"page": "5"}) - assert output == [] - - -def test_pgvector_collection_with_metadata() -> None: - """Test end to end collection construction""" - pgvector = PGVector( - collection_name="test_collection", - collection_metadata={"foo": "bar"}, - embedding_function=FakeEmbeddingsWithAdaDimension(), - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - session = Session(pgvector._create_engine()) - collection = pgvector.get_collection(session) - if collection is None: - assert False, "Expected a CollectionStore object but received None" - else: - assert collection.name == "test_collection" - assert collection.cmetadata == {"foo": "bar"} - - -def test_pgvector_with_filter_in_set() -> None: - """Test end to end construction and search.""" - texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection_filter", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - output = docsearch.similarity_search_with_score( - "foo", k=2, filter={"page": {"IN": ["0", "2"]}} - ) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}), 0.0), - (Document(page_content="baz", metadata={"page": "2"}), 0.0013003906671379406), - ] - - -def test_pgvector_with_filter_nin_set() -> None: - """Test end to end construction and search.""" - texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection_filter", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - output = docsearch.similarity_search_with_score( - "foo", k=2, filter={"page": {"NIN": ["1"]}} - ) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}), 0.0), - (Document(page_content="baz", metadata={"page": "2"}), 0.0013003906671379406), - ] - - -def test_pgvector_delete_docs() -> None: - """Add and delete documents.""" - texts = ["foo", 
"bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection_filter", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - ids=["1", "2", "3"], - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - docsearch.delete(["1", "2"]) - with docsearch._make_session() as session: - records = list(session.query(docsearch.EmbeddingStore).all()) - # ignoring type error since mypy cannot determine whether - # the list is sortable - assert sorted(record.custom_id for record in records) == ["3"] # type: ignore - - docsearch.delete(["2", "3"]) # Should not raise on missing ids - with docsearch._make_session() as session: - records = list(session.query(docsearch.EmbeddingStore).all()) - # ignoring type error since mypy cannot determine whether - # the list is sortable - assert sorted(record.custom_id for record in records) == [] # type: ignore - - -def test_pgvector_relevance_score() -> None: - """Test to make sure the relevance score is scaled to 0-1.""" - texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - - output = docsearch.similarity_search_with_relevance_scores("foo", k=3) - assert output == [ - (Document(page_content="foo", metadata={"page": "0"}), 1.0), - (Document(page_content="bar", metadata={"page": "1"}), 0.9996744261675065), - (Document(page_content="baz", metadata={"page": "2"}), 0.9986996093328621), - ] - - -def test_pgvector_retriever_search_threshold() -> None: - """Test using retriever for searching with threshold.""" - texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - - retriever = docsearch.as_retriever( - search_type="similarity_score_threshold", - search_kwargs={"k": 3, "score_threshold": 0.999}, - ) - output = retriever.get_relevant_documents("summer") - assert output == [ - Document(page_content="foo", metadata={"page": "0"}), - Document(page_content="bar", metadata={"page": "1"}), - ] - - -def test_pgvector_retriever_search_threshold_custom_normalization_fn() -> None: - """Test searching with threshold and custom normalization function""" - texts = ["foo", "bar", "baz"] - metadatas = [{"page": str(i)} for i in range(len(texts))] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - metadatas=metadatas, - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - relevance_score_fn=lambda d: d * 0, - ) - - retriever = docsearch.as_retriever( - search_type="similarity_score_threshold", - search_kwargs={"k": 3, "score_threshold": 0.5}, - ) - output = retriever.get_relevant_documents("foo") - assert output == [] - - -def test_pgvector_max_marginal_relevance_search() -> None: - """Test max marginal relevance search.""" - texts = ["foo", "bar", "baz"] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - connection_string=CONNECTION_STRING, - 
pre_delete_collection=True, - ) - output = docsearch.max_marginal_relevance_search("foo", k=1, fetch_k=3) - assert output == [Document(page_content="foo")] - - -def test_pgvector_max_marginal_relevance_search_with_score() -> None: - """Test max marginal relevance search with relevance scores.""" - texts = ["foo", "bar", "baz"] - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - ) - output = docsearch.max_marginal_relevance_search_with_score("foo", k=1, fetch_k=3) - assert output == [(Document(page_content="foo"), 0.0)] - - -def test_pgvector_with_custom_connection() -> None: - """Test construction using a custom connection.""" - texts = ["foo", "bar", "baz"] - engine = sqlalchemy.create_engine(CONNECTION_STRING) - with engine.connect() as connection: - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - connection=connection, - ) - output = docsearch.similarity_search("foo", k=1) - assert output == [Document(page_content="foo")] - - -def test_pgvector_with_custom_engine_args() -> None: - """Test construction using custom engine arguments.""" - texts = ["foo", "bar", "baz"] - engine_args = { - "pool_size": 5, - "max_overflow": 10, - "pool_recycle": -1, - "pool_use_lifo": False, - "pool_pre_ping": False, - "pool_timeout": 30, - } - docsearch = PGVector.from_texts( - texts=texts, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - engine_args=engine_args, - ) - output = docsearch.similarity_search("foo", k=1) - assert output == [Document(page_content="foo")] - - -# We should reuse this test-case across other integrations -# Add database fixture using pytest -@pytest.fixture -def pgvector() -> Generator[PGVector, None, None]: - """Create a PGVector instance.""" - store = PGVector.from_documents( - documents=DOCUMENTS, - collection_name="test_collection", - embedding=FakeEmbeddingsWithAdaDimension(), - connection_string=CONNECTION_STRING, - pre_delete_collection=True, - relevance_score_fn=lambda d: d * 0, - use_jsonb=True, - ) - try: - yield store - # Do clean up - finally: - store.drop_tables() - - -@pytest.mark.parametrize("test_filter, expected_ids", TYPE_1_FILTERING_TEST_CASES[:1]) -def test_pgvector_with_with_metadata_filters_1( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], -) -> None: - """Test end to end construction and search.""" - docs = pgvector.similarity_search("meow", k=5, filter=test_filter) - assert [doc.metadata["id"] for doc in docs] == expected_ids, test_filter - - -@pytest.mark.parametrize("test_filter, expected_ids", TYPE_2_FILTERING_TEST_CASES) -def test_pgvector_with_with_metadata_filters_2( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], -) -> None: - """Test end to end construction and search.""" - docs = pgvector.similarity_search("meow", k=5, filter=test_filter) - assert [doc.metadata["id"] for doc in docs] == expected_ids, test_filter - - -@pytest.mark.parametrize("test_filter, expected_ids", TYPE_3_FILTERING_TEST_CASES) -def test_pgvector_with_with_metadata_filters_3( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], -) -> None: - """Test end to end construction and 
search.""" - docs = pgvector.similarity_search("meow", k=5, filter=test_filter) - assert [doc.metadata["id"] for doc in docs] == expected_ids, test_filter - - -@pytest.mark.parametrize("test_filter, expected_ids", TYPE_4_FILTERING_TEST_CASES) -def test_pgvector_with_with_metadata_filters_4( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], -) -> None: - """Test end to end construction and search.""" - docs = pgvector.similarity_search("meow", k=5, filter=test_filter) - assert [doc.metadata["id"] for doc in docs] == expected_ids, test_filter - - -@pytest.mark.parametrize("test_filter, expected_ids", TYPE_5_FILTERING_TEST_CASES) -def test_pgvector_with_with_metadata_filters_5( - pgvector: PGVector, - test_filter: Dict[str, Any], - expected_ids: List[int], -) -> None: - """Test end to end construction and search.""" - docs = pgvector.similarity_search("meow", k=5, filter=test_filter) - assert [doc.metadata["id"] for doc in docs] == expected_ids, test_filter - - -@pytest.mark.parametrize( - "invalid_filter", - [ - ["hello"], - { - "id": 2, - "$name": "foo", - }, - {"$or": {}}, - {"$and": {}}, - {"$between": {}}, - {"$eq": {}}, - ], -) -def test_invalid_filters(pgvector: PGVector, invalid_filter: Any) -> None: - """Verify that invalid filters raise an error.""" - with pytest.raises(ValueError): - pgvector._create_filter_clause(invalid_filter) - - -def test_validate_operators() -> None: - """Verify that all operators have been categorized.""" - assert sorted(SUPPORTED_OPERATORS) == [ - "$and", - "$between", - "$eq", - "$gt", - "$gte", - "$ilike", - "$in", - "$like", - "$lt", - "$lte", - "$ne", - "$nin", - "$or", - ] diff --git a/libs/partners/postgres/tests/unit_tests/__init__.py b/libs/partners/postgres/tests/unit_tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/libs/partners/postgres/tests/unit_tests/test_imports.py b/libs/partners/postgres/tests/unit_tests/test_imports.py deleted file mode 100644 index 761a273c1d..0000000000 --- a/libs/partners/postgres/tests/unit_tests/test_imports.py +++ /dev/null @@ -1,14 +0,0 @@ -from langchain_postgres import __all__ - -EXPECTED_ALL = [ - "__version__", - "CheckpointSerializer", - "PostgresChatMessageHistory", - "PostgresCheckpoint", - "PickleCheckpointSerializer", -] - - -def test_all_imports() -> None: - """Test that __all__ is correctly defined.""" - assert sorted(EXPECTED_ALL) == sorted(__all__) diff --git a/libs/partners/postgres/tests/utils.py b/libs/partners/postgres/tests/utils.py deleted file mode 100644 index 97313008e5..0000000000 --- a/libs/partners/postgres/tests/utils.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Get fixtures for the database connection.""" -import os -from contextlib import asynccontextmanager, contextmanager - -import psycopg -from typing_extensions import AsyncGenerator, Generator - -PG_USER = os.environ.get("PG_USER", "langchain") -PG_HOST = os.environ.get("PG_HOST", "localhost") -PG_PASSWORD = os.environ.get("PG_PASSWORD", "langchain") -PG_DATABASE = os.environ.get("PG_DATABASE", "langchain") - -# Using a different port for testing than the default 5432 -# to avoid conflicts with a running PostgreSQL instance -# This port matches the convention in langchain/docker/docker-compose.yml -# To spin up a PostgreSQL instance for testing, run: -# docker-compose -f docker/docker-compose.yml up -d postgres -PG_PORT = os.environ.get("PG_PORT", "6023") - -DSN = f"postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}" - - -@asynccontextmanager -async def 
asyncpg_client() -> AsyncGenerator[psycopg.AsyncConnection, None]: - # Establish a connection to your test database - conn = await psycopg.AsyncConnection.connect(conninfo=DSN) - try: - yield conn - finally: - # Cleanup: close the connection after the test is done - await conn.close() - - -@contextmanager -def syncpg_client() -> Generator[psycopg.Connection, None, None]: - # Establish a connection to your test database - conn = psycopg.connect(conninfo=DSN) - try: - yield conn - finally: - # Cleanup: close the connection after the test is done - conn.close()
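
For reference, the `tests/utils.py` fixtures deleted above were consumed as plain context managers. A minimal sketch, assuming a Postgres server is reachable on the DSN built in that file (port 6023 by default, per the docker-compose convention noted there):

```python
# Minimal usage sketch for the syncpg_client / asyncpg_client fixtures
# from the deleted tests/utils.py. Assumes a Postgres instance is
# listening on the DSN those fixtures build (port 6023 by default).
import asyncio

from tests.utils import asyncpg_client, syncpg_client  # deleted above


def check_sync_connection() -> None:
    # syncpg_client yields a psycopg.Connection and closes it on exit.
    with syncpg_client() as conn:
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
            assert cur.fetchone() == (1,)


async def check_async_connection() -> None:
    # asyncpg_client yields a psycopg.AsyncConnection.
    async with asyncpg_client() as conn:
        async with conn.cursor() as cur:
            await cur.execute("SELECT 1")
            assert await cur.fetchone() == (1,)


if __name__ == "__main__":
    check_sync_connection()
    asyncio.run(check_async_connection())
```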
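The `scripts/check_imports.py` helper above (its `has_faillure` typo corrected to `has_failure` in this diff) relies on `SourceFileLoader.load_module`, which is deprecated in recent Python versions. A hedged sketch of an equivalent check using `importlib.util`; this is a possible modernization, not the code that shipped:

```python
# Sketch: modern equivalent of scripts/check_imports.py, replacing the
# deprecated SourceFileLoader.load_module with importlib.util.
import importlib.util
import sys
import traceback

if __name__ == "__main__":
    has_failure = False
    for file in sys.argv[1:]:
        try:
            # Build a module spec from the file path and execute it,
            # which surfaces import-time errors just like load_module did.
            spec = importlib.util.spec_from_file_location("x", file)
            assert spec is not None and spec.loader is not None
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
        except Exception:
            has_failure = True
            print(file)
            traceback.print_exc()
            print()
    sys.exit(1 if has_failure else 0)
```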
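The `FakeEmbeddings` helper makes test ordering deterministic: each document embedding is nine `1.0`s followed by the document's index, while the query embedding ends in `0.0`, so the L2 distance from the query to document `i` is exactly `i`. A quick self-contained check of that property:

```python
# Self-contained check of the FakeEmbeddings distance property described above.
import numpy as np

texts = ["foo", "bar", "baz"]
# Same construction as FakeEmbeddings.embed_documents / embed_query.
doc_vecs = np.array([[1.0] * 9 + [float(i)] for i in range(len(texts))])
query_vec = np.array([1.0] * 9 + [0.0])

# Only the last dimension differs, so the L2 distance equals the index.
distances = np.linalg.norm(doc_vecs - query_vec, axis=1)
print(distances)  # [0. 1. 2.] -- distance to text i is exactly i
```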
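The filtering fixtures exercise a small Mongo-style operator grammar (`$eq`, `$ne`, `$gt`, `$gte`, `$lt`, `$lte`, `$in`, `$nin`, `$between`, `$like`, `$ilike`, `$and`, `$or`), which the vector-store tests pass straight through `similarity_search(..., filter=...)`. A sketch of that flow outside pytest; the connection string is a placeholder and the `tests.*` imports only resolve inside the removed package:

```python
# Sketch: applying the TYPE_*_FILTERING_TEST_CASES filter dictionaries
# directly, mirroring the pgvector fixture above. The connection string
# is a placeholder for a pgvector-enabled database.
from langchain_postgres.vectorstores import PGVector
from tests.integration_tests.fake_embeddings import FakeEmbeddings
from tests.integration_tests.fixtures.filtering_test_cases import DOCUMENTS

store = PGVector.from_documents(
    documents=DOCUMENTS,
    collection_name="filter_demo",
    embedding=FakeEmbeddings(),
    connection_string="postgresql+psycopg://langchain:langchain@localhost:6024/langchain",
    use_jsonb=True,
)

# Top-level keys are ANDed together; operators nest under the field name.
docs = store.similarity_search(
    "id 1",
    k=5,
    filter={"$and": [{"id": {"$gte": 1}}, {"name": {"$in": ["adam", "jane"]}}]},
)
print([d.metadata["id"] for d in docs])  # expected: [1, 3]
```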