From f0d5b599623ca2477c97c95a6a5cecb7fc835d07 Mon Sep 17 00:00:00 2001 From: Erick Friis Date: Tue, 2 Apr 2024 13:28:10 -0700 Subject: [PATCH] core[patch]: remove requests (#19891) Removes required usage of `requests` from `langchain-core`, all of which has been deprecated. - removes Tracer V1 implementations - removes old `try_load_from_hub` github-based hub implementations Removal done in a way where imports will still succeed, and usage will fail with a `RuntimeError`. --- .../callbacks/test_langchain_tracer.py | 41 +- libs/core/langchain_core/callbacks/manager.py | 30 +- libs/core/langchain_core/prompts/loading.py | 15 +- libs/core/langchain_core/tracers/context.py | 42 +- .../langchain_core/tracers/langchain_v1.py | 193 +----- libs/core/langchain_core/utils/loading.py | 69 +-- libs/core/poetry.lock | 138 ++--- libs/core/pyproject.toml | 1 - .../unit_tests/tracers/test_langchain_v1.py | 562 ------------------ .../tests/unit_tests/utils/test_loading.py | 106 ---- .../langchain_experimental/prompts/load.py | 53 +- libs/langchain/langchain/agents/loading.py | 16 +- libs/langchain/langchain/chains/loading.py | 15 +- 13 files changed, 137 insertions(+), 1144 deletions(-) delete mode 100644 libs/core/tests/unit_tests/tracers/test_langchain_v1.py delete mode 100644 libs/core/tests/unit_tests/utils/test_loading.py diff --git a/libs/community/tests/integration_tests/callbacks/test_langchain_tracer.py b/libs/community/tests/integration_tests/callbacks/test_langchain_tracer.py index d4941b6a37..51b2552139 100644 --- a/libs/community/tests/integration_tests/callbacks/test_langchain_tracer.py +++ b/libs/community/tests/integration_tests/callbacks/test_langchain_tracer.py @@ -1,11 +1,12 @@ """Integration tests for the langchain tracer module.""" + import asyncio import os from aiohttp import ClientSession from langchain_core.callbacks.manager import atrace_as_chain_group, trace_as_chain_group from langchain_core.prompts import PromptTemplate -from langchain_core.tracers.context import tracing_enabled, tracing_v2_enabled +from langchain_core.tracers.context import tracing_v2_enabled from langchain_community.chat_models import ChatOpenAI from langchain_community.llms import OpenAI @@ -95,44 +96,6 @@ async def test_tracing_concurrent_bw_compat_environ() -> None: del os.environ["LANGCHAIN_HANDLER"] -def test_tracing_context_manager() -> None: - from langchain.agents import AgentType, initialize_agent, load_tools - - llm = OpenAI(temperature=0) - tools = load_tools(["llm-math", "serpapi"], llm=llm) - agent = initialize_agent( - tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True - ) - if "LANGCHAIN_TRACING" in os.environ: - del os.environ["LANGCHAIN_TRACING"] - with tracing_enabled() as session: - assert session - agent.run(questions[0]) # this should be traced - - agent.run(questions[0]) # this should not be traced - - -async def test_tracing_context_manager_async() -> None: - from langchain.agents import AgentType, initialize_agent, load_tools - - llm = OpenAI(temperature=0) - async_tools = load_tools(["llm-math", "serpapi"], llm=llm) - agent = initialize_agent( - async_tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True - ) - if "LANGCHAIN_TRACING" in os.environ: - del os.environ["LANGCHAIN_TRACING"] - - # start a background task - task = asyncio.create_task(agent.arun(questions[0])) # this should not be traced - with tracing_enabled() as session: - assert session - tasks = [agent.arun(q) for q in questions[1:4]] # these should be traced - await 
asyncio.gather(*tasks) - - await task - - async def test_tracing_v2_environment_variable() -> None: from langchain.agents import AgentType, initialize_agent, load_tools diff --git a/libs/core/langchain_core/callbacks/manager.py b/libs/core/langchain_core/callbacks/manager.py index 27792bbf70..74f4d1a639 100644 --- a/libs/core/langchain_core/callbacks/manager.py +++ b/libs/core/langchain_core/callbacks/manager.py @@ -1914,7 +1914,6 @@ def _configure( _configure_hooks, _get_tracer_project, _tracing_v2_is_enabled, - tracing_callback_var, tracing_v2_callback_var, ) @@ -1953,20 +1952,25 @@ def _configure( callback_manager.add_metadata(inheritable_metadata or {}) callback_manager.add_metadata(local_metadata or {}, False) - tracer = tracing_callback_var.get() - tracing_enabled_ = ( - env_var_is_set("LANGCHAIN_TRACING") - or tracer is not None - or env_var_is_set("LANGCHAIN_HANDLER") + v1_tracing_enabled_ = env_var_is_set("LANGCHAIN_TRACING") or env_var_is_set( + "LANGCHAIN_HANDLER" ) tracer_v2 = tracing_v2_callback_var.get() tracing_v2_enabled_ = _tracing_v2_is_enabled() + + if v1_tracing_enabled_ and not tracing_v2_enabled_: + # if both are enabled, can silently ignore the v1 tracer + raise RuntimeError( + "Tracing using LangChainTracerV1 is no longer supported. " + "Please set the LANGCHAIN_TRACING_V2 environment variable to enable " + "tracing instead." + ) + tracer_project = _get_tracer_project() debug = _get_debug() - if verbose or debug or tracing_enabled_ or tracing_v2_enabled_: + if verbose or debug or tracing_v2_enabled_: from langchain_core.tracers.langchain import LangChainTracer - from langchain_core.tracers.langchain_v1 import LangChainTracerV1 from langchain_core.tracers.stdout import ConsoleCallbackHandler if verbose and not any( @@ -1982,16 +1986,6 @@ def _configure( for handler in callback_manager.handlers ): callback_manager.add_handler(ConsoleCallbackHandler(), True) - if tracing_enabled_ and not any( - isinstance(handler, LangChainTracerV1) - for handler in callback_manager.handlers - ): - if tracer: - callback_manager.add_handler(tracer, True) - else: - handler = LangChainTracerV1() - handler.load_session(tracer_project) - callback_manager.add_handler(handler, True) if tracing_v2_enabled_ and not any( isinstance(handler, LangChainTracer) for handler in callback_manager.handlers diff --git a/libs/core/langchain_core/prompts/loading.py b/libs/core/langchain_core/prompts/loading.py index baa56a5666..eeb2c57bfb 100644 --- a/libs/core/langchain_core/prompts/loading.py +++ b/libs/core/langchain_core/prompts/loading.py @@ -1,4 +1,5 @@ """Load prompts.""" + import json import logging from pathlib import Path @@ -11,7 +12,6 @@ from langchain_core.prompts.base import BasePromptTemplate from langchain_core.prompts.chat import ChatPromptTemplate from langchain_core.prompts.few_shot import FewShotPromptTemplate from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.utils import try_load_from_hub URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/prompts/" logger = logging.getLogger(__name__) @@ -127,12 +127,13 @@ def _load_prompt(config: dict) -> PromptTemplate: def load_prompt(path: Union[str, Path]) -> BasePromptTemplate: """Unified method for loading a prompt from LangChainHub or local fs.""" - if hub_result := try_load_from_hub( - path, _load_prompt_from_file, "prompts", {"py", "json", "yaml"} - ): - return hub_result - else: - return _load_prompt_from_file(path) + if isinstance(path, str) and path.startswith("lc://"): + raise 
RuntimeError( + "Loading from the deprecated github-based Hub is no longer supported. " + "Please use the new LangChain Hub at https://smith.langchain.com/hub " + "instead." + ) + return _load_prompt_from_file(path) def _load_prompt_from_file(file: Union[str, Path]) -> BasePromptTemplate: diff --git a/libs/core/langchain_core/tracers/context.py b/libs/core/langchain_core/tracers/context.py index 566dde23a7..990049ee36 100644 --- a/libs/core/langchain_core/tracers/context.py +++ b/libs/core/langchain_core/tracers/context.py @@ -18,9 +18,7 @@ from uuid import UUID from langsmith import utils as ls_utils from langsmith.run_helpers import get_run_tree_context -from langchain_core._api import deprecated from langchain_core.tracers.langchain import LangChainTracer -from langchain_core.tracers.langchain_v1 import LangChainTracerV1 from langchain_core.tracers.run_collector import RunCollectorCallbackHandler from langchain_core.tracers.schemas import TracerSessionV1 from langchain_core.utils.env import env_var_is_set @@ -31,44 +29,24 @@ if TYPE_CHECKING: from langchain_core.callbacks.base import BaseCallbackHandler, Callbacks from langchain_core.callbacks.manager import AsyncCallbackManager, CallbackManager -# Deprecated as of 0.1.0, will be removed in 0.2.0. -tracing_callback_var: ContextVar[Optional[LangChainTracerV1]] = ContextVar( # noqa: E501 - "tracing_callback", default=None -) - -tracing_v2_callback_var: ContextVar[Optional[LangChainTracer]] = ContextVar( # noqa: E501 +# for backwards partial compatibility if this is imported by users but unused +tracing_callback_var: Any = None +tracing_v2_callback_var: ContextVar[Optional[LangChainTracer]] = ContextVar( "tracing_callback_v2", default=None -) -run_collector_var: ContextVar[Optional[RunCollectorCallbackHandler]] = ContextVar( # noqa: E501 +) # noqa: E501 +run_collector_var: ContextVar[Optional[RunCollectorCallbackHandler]] = ContextVar( "run_collector", default=None -) +) # noqa: E501 @contextmanager -@deprecated("0.1.0", alternative="tracing_v2_enabled", removal="0.2.0") def tracing_enabled( session_name: str = "default", ) -> Generator[TracerSessionV1, None, None]: - """Get the Deprecated LangChainTracer in a context manager. - - Args: - session_name (str, optional): The name of the session. - Defaults to "default". - - Returns: - TracerSessionV1: The LangChainTracer session. - - Example: - >>> with tracing_enabled() as session: - ... # Use the LangChainTracer session - """ - cb = LangChainTracerV1() - session = cast(TracerSessionV1, cb.load_session(session_name)) - try: - tracing_callback_var.set(cb) - yield session - finally: - tracing_callback_var.set(None) + """Throws an error because this has been replaced by tracing_v2_enabled.""" + raise RuntimeError( + "tracing_enabled is no longer supported. Please use tracing_v2_enabled instead."
+ ) @contextmanager diff --git a/libs/core/langchain_core/tracers/langchain_v1.py b/libs/core/langchain_core/tracers/langchain_v1.py index 955c15c7d7..aac99a7206 100644 --- a/libs/core/langchain_core/tracers/langchain_v1.py +++ b/libs/core/langchain_core/tracers/langchain_v1.py @@ -1,187 +1,14 @@ -from __future__ import annotations - -import logging -import os -from typing import Any, Dict, Optional, Union - -import requests - -from langchain_core._api import deprecated -from langchain_core.messages import get_buffer_string -from langchain_core.tracers.base import BaseTracer -from langchain_core.tracers.schemas import ( - ChainRun, - LLMRun, - Run, - ToolRun, - TracerSession, - TracerSessionV1, - TracerSessionV1Base, -) -from langchain_core.utils import raise_for_status_with_text - -logger = logging.getLogger(__name__) +from typing import Any -def get_headers() -> Dict[str, Any]: - """Get the headers for the LangChain API.""" - headers: Dict[str, Any] = {"Content-Type": "application/json"} - if os.getenv("LANGCHAIN_API_KEY"): - headers["x-api-key"] = os.getenv("LANGCHAIN_API_KEY") - return headers +def get_headers(*args: Any, **kwargs: Any) -> Any: + raise RuntimeError( + "get_headers for LangChainTracerV1 is no longer supported. " + "Please use LangChainTracer instead." + ) -def _get_endpoint() -> str: - return os.getenv("LANGCHAIN_ENDPOINT", "http://localhost:8000") - - -@deprecated("0.1.0", alternative="LangChainTracer", removal="0.2.0") -class LangChainTracerV1(BaseTracer): - """Implementation of the SharedTracer that POSTS to the langchain endpoint.""" - - def __init__(self, **kwargs: Any) -> None: - """Initialize the LangChain tracer.""" - super().__init__(**kwargs) - self.session: Optional[TracerSessionV1] = None - self._endpoint = _get_endpoint() - self._headers = get_headers() - - def _convert_to_v1_run(self, run: Run) -> Union[LLMRun, ChainRun, ToolRun]: - session = self.session or self.load_default_session() - if not isinstance(session, TracerSessionV1): - raise ValueError( - "LangChainTracerV1 is not compatible with" - f" session of type {type(session)}" - ) - - if run.run_type == "llm": - if "prompts" in run.inputs: - prompts = run.inputs["prompts"] - elif "messages" in run.inputs: - prompts = [get_buffer_string(batch) for batch in run.inputs["messages"]] - else: - raise ValueError("No prompts found in LLM run inputs") - return LLMRun( - uuid=str(run.id) if run.id else None, # type: ignore[arg-type] - parent_uuid=str(run.parent_run_id) if run.parent_run_id else None, - start_time=run.start_time, - end_time=run.end_time, # type: ignore[arg-type] - extra=run.extra, - execution_order=run.execution_order, - child_execution_order=run.child_execution_order, - serialized=run.serialized, # type: ignore[arg-type] - session_id=session.id, - error=run.error, - prompts=prompts, - response=run.outputs if run.outputs else None, # type: ignore[arg-type] - ) - if run.run_type == "chain": - child_runs = [self._convert_to_v1_run(run) for run in run.child_runs] - return ChainRun( - uuid=str(run.id) if run.id else None, # type: ignore[arg-type] - parent_uuid=str(run.parent_run_id) if run.parent_run_id else None, - start_time=run.start_time, - end_time=run.end_time, # type: ignore[arg-type] - execution_order=run.execution_order, - child_execution_order=run.child_execution_order, - serialized=run.serialized, # type: ignore[arg-type] - session_id=session.id, - inputs=run.inputs, - outputs=run.outputs, - error=run.error, - extra=run.extra, - child_llm_runs=[run for run in child_runs if 
isinstance(run, LLMRun)], - child_chain_runs=[ - run for run in child_runs if isinstance(run, ChainRun) - ], - child_tool_runs=[run for run in child_runs if isinstance(run, ToolRun)], - ) - if run.run_type == "tool": - child_runs = [self._convert_to_v1_run(run) for run in run.child_runs] - return ToolRun( - uuid=str(run.id) if run.id else None, # type: ignore[arg-type] - parent_uuid=str(run.parent_run_id) if run.parent_run_id else None, - start_time=run.start_time, - end_time=run.end_time, # type: ignore[arg-type] - execution_order=run.execution_order, - child_execution_order=run.child_execution_order, - serialized=run.serialized, # type: ignore[arg-type] - session_id=session.id, - action=str(run.serialized), - tool_input=run.inputs.get("input", ""), - output=None if run.outputs is None else run.outputs.get("output"), - error=run.error, - extra=run.extra, - child_chain_runs=[ - run for run in child_runs if isinstance(run, ChainRun) - ], - child_tool_runs=[run for run in child_runs if isinstance(run, ToolRun)], - child_llm_runs=[run for run in child_runs if isinstance(run, LLMRun)], - ) - raise ValueError(f"Unknown run type: {run.run_type}") - - def _persist_run(self, run: Union[Run, LLMRun, ChainRun, ToolRun]) -> None: - """Persist a run.""" - if isinstance(run, Run): - v1_run = self._convert_to_v1_run(run) - else: - v1_run = run - if isinstance(v1_run, LLMRun): - endpoint = f"{self._endpoint}/llm-runs" - elif isinstance(v1_run, ChainRun): - endpoint = f"{self._endpoint}/chain-runs" - else: - endpoint = f"{self._endpoint}/tool-runs" - - try: - response = requests.post( - endpoint, - data=v1_run.json(), - headers=self._headers, - ) - raise_for_status_with_text(response) - except Exception as e: - logger.warning(f"Failed to persist run: {e}") - - def _persist_session( - self, session_create: TracerSessionV1Base - ) -> Union[TracerSessionV1, TracerSession]: - """Persist a session.""" - try: - r = requests.post( - f"{self._endpoint}/sessions", - data=session_create.json(), - headers=self._headers, - ) - session = TracerSessionV1(id=r.json()["id"], **session_create.dict()) - except Exception as e: - logger.warning(f"Failed to create session, using default session: {e}") - session = TracerSessionV1(id=1, **session_create.dict()) - return session - - def _load_session(self, session_name: Optional[str] = None) -> TracerSessionV1: - """Load a session from the tracer.""" - try: - url = f"{self._endpoint}/sessions" - if session_name: - url += f"?name={session_name}" - r = requests.get(url, headers=self._headers) - - tracer_session = TracerSessionV1(**r.json()[0]) - except Exception as e: - session_type = "default" if not session_name else session_name - logger.warning( - f"Failed to load {session_type} session, using empty session: {e}" - ) - tracer_session = TracerSessionV1(id=1) - - self.session = tracer_session - return tracer_session - - def load_session(self, session_name: str) -> Union[TracerSessionV1, TracerSession]: - """Load a session with the given name from the tracer.""" - return self._load_session(session_name) - - def load_default_session(self) -> Union[TracerSessionV1, TracerSession]: - """Load the default tracing session and set it as the Tracer's session.""" - return self._load_session("default") +def LangChainTracerV1(*args: Any, **kwargs: Any) -> Any: + raise RuntimeError( + "LangChainTracerV1 is no longer supported. Please use LangChainTracer instead." 
+ ) diff --git a/libs/core/langchain_core/utils/loading.py b/libs/core/langchain_core/utils/loading.py index 10ab94bcc4..3affa3f666 100644 --- a/libs/core/langchain_core/utils/loading.py +++ b/libs/core/langchain_core/utils/loading.py @@ -1,68 +1,13 @@ """Utilities for loading configurations from langchain_core-hub.""" -import os -import re -import tempfile -from pathlib import Path, PurePosixPath -from typing import Any, Callable, Optional, Set, TypeVar, Union -from urllib.parse import urljoin - -import requests - -from langchain_core._api.deprecation import deprecated - -DEFAULT_REF = os.environ.get("LANGCHAIN_HUB_DEFAULT_REF", "master") -LANGCHAINHUB_REPO = "https://raw.githubusercontent.com/hwchase17/langchain-hub/" -URL_BASE = os.environ.get( - "LANGCHAIN_HUB_URL_BASE", - LANGCHAINHUB_REPO + "{ref}/", ) -HUB_PATH_RE = re.compile(r"lc(?P<ref>@[^:]+)?://(?P<path>.*)") - -T = TypeVar("T") +from typing import Any -@deprecated( - since="0.1.30", - removal="0.2", - message=( - "Using the hwchase17/langchain-hub " - "repo for prompts is deprecated. Please use " - "https://smith.langchain.com/hub instead." - ), -) def try_load_from_hub( - path: Union[str, Path], - loader: Callable[[str], T], - valid_prefix: str, - valid_suffixes: Set[str], + *args: Any, **kwargs: Any, -) -> Optional[T]: - """Load configuration from hub. Returns None if path is not a hub path.""" - if not isinstance(path, str) or not (match := HUB_PATH_RE.match(path)): - return None - ref, remote_path_str = match.groups() - ref = ref[1:] if ref else DEFAULT_REF - remote_path = Path(remote_path_str) - if remote_path.parts[0] != valid_prefix: - return None - if remote_path.suffix[1:] not in valid_suffixes: - raise ValueError(f"Unsupported file type, must be one of {valid_suffixes}.") - - # Using Path with URLs is not recommended, because on Windows - # the backslash is used as the path separator, which can cause issues - # when working with URLs that use forward slashes as the path separator. - # Instead, use PurePosixPath to ensure that forward slashes are used as the - # path separator, regardless of the operating system. - full_url = urljoin(URL_BASE.format(ref=ref), PurePosixPath(remote_path).__str__()) - if not full_url.startswith(LANGCHAINHUB_REPO): - raise ValueError(f"Invalid hub path: {path}") - - r = requests.get(full_url, timeout=5) - if r.status_code != 200: - raise ValueError(f"Could not find file at {full_url}") - with tempfile.TemporaryDirectory() as tmpdirname: - file = Path(tmpdirname) / remote_path.name - with open(file, "wb") as f: - f.write(r.content) - return loader(str(file), **kwargs) +) -> Any: + raise RuntimeError( + "Loading from the deprecated github-based Hub is no longer supported. " + "Please use the new LangChain Hub at https://smith.langchain.com/hub instead." + ) diff --git a/libs/core/poetry.lock b/libs/core/poetry.lock index 9495b9d4a8..553186b708 100644 --- a/libs/core/poetry.lock +++ b/libs/core/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -604,13 +604,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.4" +version = "1.0.5" description = "A minimal low-level HTTP client."
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -621,7 +621,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" @@ -708,13 +708,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.29.3" +version = "6.29.4" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.29.3-py3-none-any.whl", hash = "sha256:5aa086a4175b0229d4eca211e181fb473ea78ffd9869af36ba7694c947302a21"}, - {file = "ipykernel-6.29.3.tar.gz", hash = "sha256:e14c250d1f9ea3989490225cc1a542781b095a18a19447fcf2b5eaf7d0ac5bd2"}, + {file = "ipykernel-6.29.4-py3-none-any.whl", hash = "sha256:1181e653d95c6808039c509ef8e67c4126b3b3af7781496c7cbfb5ed938a27da"}, + {file = "ipykernel-6.29.4.tar.gz", hash = "sha256:3d44070060f9475ac2092b760123fadf105d2e2493c24848b6691a7c4f42af5c"}, ] [package.dependencies] @@ -1208,7 +1208,7 @@ develop = true langchain-core = "^0.1.28" [package.extras] -extended-testing = ["lxml (>=5.1.0,<6.0.0)"] +extended-testing = ["lxml (>=4.9.3,<6.0)"] [package.source] type = "directory" @@ -1216,13 +1216,13 @@ url = "../text-splitters" [[package]] name = "langsmith" -version = "0.1.31" +version = "0.1.38" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.31-py3-none-any.whl", hash = "sha256:5211a9dc00831db307eb843485a97096484b697b5d2cd1efaac34228e97ca087"}, - {file = "langsmith-0.1.31.tar.gz", hash = "sha256:efd54ccd44be7fda911bfdc0ead340473df2fdd07345c7252901834d0c4aa37e"}, + {file = "langsmith-0.1.38-py3-none-any.whl", hash = "sha256:f36479f82cf537cf40d129ac2e485e72a3981360c7b6cf2549dad77d98eafd8f"}, + {file = "langsmith-0.1.38.tar.gz", hash = "sha256:2c1f98ac0a8c02e43b625650a6e13c65b09523551bfc21a59d20963f46f7d265"}, ] [package.dependencies] @@ -1553,61 +1553,62 @@ files = [ [[package]] name = "orjson" -version = "3.9.15" +version = "3.10.0" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"}, - {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"}, - {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"}, - {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"}, - {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"}, - {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"}, - {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"}, - {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"}, - {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"}, - {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"}, - {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"}, - {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"}, - {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"}, - {file = 
"orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"}, - {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"}, - {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"}, - {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"}, - {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"}, - {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"}, + {file = "orjson-3.10.0-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:47af5d4b850a2d1328660661f0881b67fdbe712aea905dadd413bdea6f792c33"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c90681333619d78360d13840c7235fdaf01b2b129cb3a4f1647783b1971542b6"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:400c5b7c4222cb27b5059adf1fb12302eebcabf1978f33d0824aa5277ca899bd"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dcb32e949eae80fb335e63b90e5808b4b0f64e31476b3777707416b41682db5"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7d507c7493252c0a0264b5cc7e20fa2f8622b8a83b04d819b5ce32c97cf57b"}, + {file = "orjson-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e286a51def6626f1e0cc134ba2067dcf14f7f4b9550f6dd4535fd9d79000040b"}, + {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8acd4b82a5f3a3ec8b1dc83452941d22b4711964c34727eb1e65449eead353ca"}, + {file = "orjson-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:30707e646080dd3c791f22ce7e4a2fc2438765408547c10510f1f690bd336217"}, + {file = "orjson-3.10.0-cp310-none-win32.whl", hash = "sha256:115498c4ad34188dcb73464e8dc80e490a3e5e88a925907b6fedcf20e545001a"}, + {file = "orjson-3.10.0-cp310-none-win_amd64.whl", hash = "sha256:6735dd4a5a7b6df00a87d1d7a02b84b54d215fb7adac50dd24da5997ffb4798d"}, + {file = "orjson-3.10.0-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9587053e0cefc284e4d1cd113c34468b7d3f17666d22b185ea654f0775316a26"}, + {file = 
"orjson-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bef1050b1bdc9ea6c0d08468e3e61c9386723633b397e50b82fda37b3563d72"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d16c6963ddf3b28c0d461641517cd312ad6b3cf303d8b87d5ef3fa59d6844337"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4251964db47ef090c462a2d909f16c7c7d5fe68e341dabce6702879ec26d1134"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73bbbdc43d520204d9ef0817ac03fa49c103c7f9ea94f410d2950755be2c349c"}, + {file = "orjson-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:414e5293b82373606acf0d66313aecb52d9c8c2404b1900683eb32c3d042dbd7"}, + {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:feaed5bb09877dc27ed0d37f037ddef6cb76d19aa34b108db270d27d3d2ef747"}, + {file = "orjson-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5127478260db640323cea131ee88541cb1a9fbce051f0b22fa2f0892f44da302"}, + {file = "orjson-3.10.0-cp311-none-win32.whl", hash = "sha256:b98345529bafe3c06c09996b303fc0a21961820d634409b8639bc16bd4f21b63"}, + {file = "orjson-3.10.0-cp311-none-win_amd64.whl", hash = "sha256:658ca5cee3379dd3d37dbacd43d42c1b4feee99a29d847ef27a1cb18abdfb23f"}, + {file = "orjson-3.10.0-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4329c1d24fd130ee377e32a72dc54a3c251e6706fccd9a2ecb91b3606fddd998"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef0f19fdfb6553342b1882f438afd53c7cb7aea57894c4490c43e4431739c700"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4f60db24161534764277f798ef53b9d3063092f6d23f8f962b4a97edfa997a0"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1de3fd5c7b208d836f8ecb4526995f0d5877153a4f6f12f3e9bf11e49357de98"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f93e33f67729d460a177ba285002035d3f11425ed3cebac5f6ded4ef36b28344"}, + {file = "orjson-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:237ba922aef472761acd697eef77fef4831ab769a42e83c04ac91e9f9e08fa0e"}, + {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98c1bfc6a9bec52bc8f0ab9b86cc0874b0299fccef3562b793c1576cf3abb570"}, + {file = "orjson-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:30d795a24be16c03dca0c35ca8f9c8eaaa51e3342f2c162d327bd0225118794a"}, + {file = "orjson-3.10.0-cp312-none-win32.whl", hash = "sha256:6a3f53dc650bc860eb26ec293dfb489b2f6ae1cbfc409a127b01229980e372f7"}, + {file = "orjson-3.10.0-cp312-none-win_amd64.whl", hash = "sha256:983db1f87c371dc6ffc52931eb75f9fe17dc621273e43ce67bee407d3e5476e9"}, + {file = "orjson-3.10.0-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a667769a96a72ca67237224a36faf57db0c82ab07d09c3aafc6f956196cfa1b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade1e21dfde1d37feee8cf6464c20a2f41fa46c8bcd5251e761903e46102dc6b"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c12bb4ced1c3308eff7ba5c63ef8f0edb3e4c43c026440247dd6c1c61cea4b"}, + {file = 
"orjson-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2d014cf8d4dc9f03fc9f870de191a49a03b1bcda51f2a957943fb9fafe55aac"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eadecaa16d9783affca33597781328e4981b048615c2ddc31c47a51b833d6319"}, + {file = "orjson-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd583341218826f48bd7c6ebf3310b4126216920853cbc471e8dbeaf07b0b80e"}, + {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90bfc137c75c31d32308fd61951d424424426ddc39a40e367704661a9ee97095"}, + {file = "orjson-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13b5d3c795b09a466ec9fcf0bd3ad7b85467d91a60113885df7b8d639a9d374b"}, + {file = "orjson-3.10.0-cp38-none-win32.whl", hash = "sha256:5d42768db6f2ce0162544845facb7c081e9364a5eb6d2ef06cd17f6050b048d8"}, + {file = "orjson-3.10.0-cp38-none-win_amd64.whl", hash = "sha256:33e6655a2542195d6fd9f850b428926559dee382f7a862dae92ca97fea03a5ad"}, + {file = "orjson-3.10.0-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4050920e831a49d8782a1720d3ca2f1c49b150953667eed6e5d63a62e80f46a2"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1897aa25a944cec774ce4a0e1c8e98fb50523e97366c637b7d0cddabc42e6643"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bf565a69e0082ea348c5657401acec3cbbb31564d89afebaee884614fba36b4"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b6ebc17cfbbf741f5c1a888d1854354536f63d84bee537c9a7c0335791bb9009"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2817877d0b69f78f146ab305c5975d0618df41acf8811249ee64231f5953fee"}, + {file = "orjson-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57d017863ec8aa4589be30a328dacd13c2dc49de1c170bc8d8c8a98ece0f2925"}, + {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22c2f7e377ac757bd3476ecb7480c8ed79d98ef89648f0176deb1da5cd014eb7"}, + {file = "orjson-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e62ba42bfe64c60c1bc84799944f80704e996592c6b9e14789c8e2a303279912"}, + {file = "orjson-3.10.0-cp39-none-win32.whl", hash = "sha256:60c0b1bdbccd959ebd1575bd0147bd5e10fc76f26216188be4a36b691c937077"}, + {file = "orjson-3.10.0-cp39-none-win_amd64.whl", hash = "sha256:175a41500ebb2fdf320bf78e8b9a75a1279525b62ba400b2b2444e274c2c8bee"}, + {file = "orjson-3.10.0.tar.gz", hash = "sha256:ba4d8cac5f2e2cff36bea6b6481cdb92b38c202bcec603d6f5ff91960595a1ed"}, ] [[package]] @@ -1807,13 +1808,13 @@ tests = ["pytest"] [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] @@ -2145,6 +2146,7 @@ files 
= [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2966,4 +2968,4 @@ extended-testing = ["jinja2"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "203d96b330412ce9defad6739381e4031fc9e995c2d9e0a61a905fc79fff11dd" +content-hash = "c26c35cf1c6529b38924a1b9d3186fdefb3b3a1fecc5197559586451bb913f4a" diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index 3a485206c3..33fa2d294c 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -15,7 +15,6 @@ langsmith = "^0.1.0" tenacity = "^8.1.0" jsonpatch = "^1.33" PyYAML = ">=5.3" -requests = "^2" packaging = "^23.2" jinja2 = { version = "^3", optional = true } diff --git a/libs/core/tests/unit_tests/tracers/test_langchain_v1.py b/libs/core/tests/unit_tests/tracers/test_langchain_v1.py deleted file mode 100644 index 571b3edbd7..0000000000 --- a/libs/core/tests/unit_tests/tracers/test_langchain_v1.py +++ /dev/null @@ -1,562 +0,0 @@ -"""Test Tracer classes.""" -from __future__ import annotations - -from datetime import datetime, timezone -from typing import Any, List, Optional, Union -from uuid import uuid4 - -import pytest -from freezegun import freeze_time - -from langchain_core.callbacks import CallbackManager -from langchain_core.messages import HumanMessage -from langchain_core.outputs import LLMResult -from langchain_core.tracers.base import BaseTracer, TracerException -from langchain_core.tracers.langchain_v1 import ( - ChainRun, - LangChainTracerV1, - LLMRun, - ToolRun, - TracerSessionV1, -) -from langchain_core.tracers.schemas import Run, TracerSessionV1Base - -TEST_SESSION_ID = 2023 - -SERIALIZED = {"id": ["llm"]} -SERIALIZED_CHAT = {"id": ["chat_model"]} - - -def load_session(session_name: str) -> TracerSessionV1: - """Load a tracing session.""" - return TracerSessionV1( - id=TEST_SESSION_ID, name=session_name, start_time=datetime.now(timezone.utc) - ) - - -def new_session(name: Optional[str] = None) -> TracerSessionV1: - """Create a new tracing session.""" - return TracerSessionV1( - id=TEST_SESSION_ID, - name=name or "default", - start_time=datetime.now(timezone.utc), - ) - - -def _persist_session(session: TracerSessionV1Base) -> TracerSessionV1: - """Persist a tracing session.""" - return TracerSessionV1(**{**session.dict(), "id": TEST_SESSION_ID}) - - -def load_default_session() -> TracerSessionV1: - """Load a tracing session.""" - return TracerSessionV1( - id=TEST_SESSION_ID, name="default", start_time=datetime.now(timezone.utc) - ) - - -@pytest.fixture -def lang_chain_tracer_v1(monkeypatch: pytest.MonkeyPatch) 
-> LangChainTracerV1: - monkeypatch.setenv("LANGCHAIN_TENANT_ID", "test-tenant-id") - monkeypatch.setenv("LANGCHAIN_ENDPOINT", "http://test-endpoint.com") - monkeypatch.setenv("LANGCHAIN_API_KEY", "foo") - tracer = LangChainTracerV1() - return tracer - - -class FakeTracer(BaseTracer): - """Fake tracer that records LangChain execution.""" - - def __init__(self) -> None: - """Initialize the tracer.""" - super().__init__() - self.runs: List[Union[LLMRun, ChainRun, ToolRun]] = [] - - def _persist_run(self, run: Union[Run, LLMRun, ChainRun, ToolRun]) -> None: - """Persist a run.""" - if isinstance(run, Run): - with pytest.MonkeyPatch().context() as m: - m.setenv("LANGCHAIN_TENANT_ID", "test-tenant-id") - m.setenv("LANGCHAIN_ENDPOINT", "http://test-endpoint.com") - m.setenv("LANGCHAIN_API_KEY", "foo") - tracer = LangChainTracerV1() - tracer.load_default_session = load_default_session # type: ignore - run = tracer._convert_to_v1_run(run) - self.runs.append(run) - - def _persist_session(self, session: TracerSessionV1Base) -> TracerSessionV1: - """Persist a tracing session.""" - return _persist_session(session) - - def new_session(self, name: Optional[str] = None) -> TracerSessionV1: - """Create a new tracing session.""" - return new_session(name) - - def load_session(self, session_name: str) -> TracerSessionV1: - """Load a tracing session.""" - return load_session(session_name) - - def load_default_session(self) -> TracerSessionV1: - """Load a tracing session.""" - return load_default_session() - - -def _compare_run_with_error(run: Any, expected_run: Any) -> None: - received = run.dict() - received_err = received.pop("error") - expected = expected_run.dict() - expected_err = expected.pop("error") - assert received == expected - assert expected_err in received_err - - -@freeze_time("2023-01-01") -def test_tracer_llm_run() -> None: - """Test tracer on an LLM run.""" - uuid = uuid4() - compare_run = LLMRun( - uuid=str(uuid), - parent_uuid=None, - start_time=datetime.now(timezone.utc), - end_time=datetime.now(timezone.utc), - extra={}, - execution_order=1, - child_execution_order=1, - serialized=SERIALIZED, - prompts=[], - response=LLMResult(generations=[[]]), - session_id=TEST_SESSION_ID, - error=None, - ) - tracer = FakeTracer() - - tracer.new_session() - tracer.on_llm_start(serialized=SERIALIZED, prompts=[], run_id=uuid) - tracer.on_llm_end(response=LLMResult(generations=[[]]), run_id=uuid) - assert tracer.runs == [compare_run] - - -@freeze_time("2023-01-01") -def test_tracer_chat_model_run() -> None: - """Test tracer on a Chat Model run.""" - tracer = FakeTracer() - - tracer.new_session() - manager = CallbackManager(handlers=[tracer]) - run_managers = manager.on_chat_model_start( - serialized=SERIALIZED_CHAT, messages=[[HumanMessage(content="")]] - ) - compare_run = LLMRun( - uuid=str(run_managers[0].run_id), - parent_uuid=None, - start_time=datetime.now(timezone.utc), - end_time=datetime.now(timezone.utc), - extra={}, - execution_order=1, - child_execution_order=1, - serialized=SERIALIZED_CHAT, - prompts=["Human: "], - response=LLMResult(generations=[[]]), - session_id=TEST_SESSION_ID, - error=None, - ) - for run_manager in run_managers: - run_manager.on_llm_end(response=LLMResult(generations=[[]])) - assert tracer.runs == [compare_run] - - -@freeze_time("2023-01-01") -def test_tracer_llm_run_errors_no_start() -> None: - """Test tracer on an LLM run without a start.""" - tracer = FakeTracer() - - tracer.new_session() - with pytest.raises(TracerException): - 
tracer.on_llm_end(response=LLMResult(generations=[[]]), run_id=uuid4()) - - -@freeze_time("2023-01-01") -def test_tracer_multiple_llm_runs() -> None: - """Test the tracer with multiple runs.""" - uuid = uuid4() - compare_run = LLMRun( - uuid=str(uuid), - parent_uuid=None, - start_time=datetime.now(timezone.utc), - end_time=datetime.now(timezone.utc), - extra={}, - execution_order=1, - child_execution_order=1, - serialized=SERIALIZED, - prompts=[], - response=LLMResult(generations=[[]]), - session_id=TEST_SESSION_ID, - error=None, - ) - tracer = FakeTracer() - - tracer.new_session() - num_runs = 10 - for _ in range(num_runs): - tracer.on_llm_start(serialized=SERIALIZED, prompts=[], run_id=uuid) - tracer.on_llm_end(response=LLMResult(generations=[[]]), run_id=uuid) - - assert tracer.runs == [compare_run] * num_runs - - -@freeze_time("2023-01-01") -def test_tracer_chain_run() -> None: - """Test tracer on a Chain run.""" - uuid = uuid4() - compare_run = ChainRun( - uuid=str(uuid), - parent_uuid=None, - start_time=datetime.now(timezone.utc), - end_time=datetime.now(timezone.utc), - extra={}, - execution_order=1, - child_execution_order=1, - serialized={"name": "chain"}, - inputs={}, - outputs={}, - session_id=TEST_SESSION_ID, - error=None, - ) - tracer = FakeTracer() - - tracer.new_session() - tracer.on_chain_start(serialized={"name": "chain"}, inputs={}, run_id=uuid) - tracer.on_chain_end(outputs={}, run_id=uuid) - assert tracer.runs == [compare_run] - - -@freeze_time("2023-01-01") -def test_tracer_tool_run() -> None: - """Test tracer on a Tool run.""" - uuid = uuid4() - compare_run = ToolRun( - uuid=str(uuid), - parent_uuid=None, - start_time=datetime.now(timezone.utc), - end_time=datetime.now(timezone.utc), - extra={}, - execution_order=1, - child_execution_order=1, - serialized={"name": "tool"}, - tool_input="test", - output="test", - action="{'name': 'tool'}", - session_id=TEST_SESSION_ID, - error=None, - ) - tracer = FakeTracer() - - tracer.new_session() - tracer.on_tool_start(serialized={"name": "tool"}, input_str="test", run_id=uuid) - tracer.on_tool_end("test", run_id=uuid) - assert tracer.runs == [compare_run] - - -@freeze_time("2023-01-01") -def test_tracer_nested_run() -> None: - """Test tracer on a nested run.""" - tracer = FakeTracer() - tracer.new_session() - - chain_uuid = uuid4() - tool_uuid = uuid4() - llm_uuid1 = uuid4() - llm_uuid2 = uuid4() - for _ in range(10): - tracer.on_chain_start( - serialized={"name": "chain"}, inputs={}, run_id=chain_uuid - ) - tracer.on_tool_start( - serialized={"name": "tool"}, - input_str="test", - run_id=tool_uuid, - parent_run_id=chain_uuid, - ) - tracer.on_llm_start( - serialized=SERIALIZED, - prompts=[], - run_id=llm_uuid1, - parent_run_id=tool_uuid, - ) - tracer.on_llm_end(response=LLMResult(generations=[[]]), run_id=llm_uuid1) - tracer.on_tool_end("test", run_id=tool_uuid) - tracer.on_llm_start( - serialized=SERIALIZED, - prompts=[], - run_id=llm_uuid2, - parent_run_id=chain_uuid, - ) - tracer.on_llm_end(response=LLMResult(generations=[[]]), run_id=llm_uuid2) - tracer.on_chain_end(outputs={}, run_id=chain_uuid) - - compare_run = ChainRun( - uuid=str(chain_uuid), - error=None, - start_time=datetime.now(timezone.utc), - end_time=datetime.now(timezone.utc), - extra={}, - execution_order=1, - child_execution_order=4, - serialized={"name": "chain"}, - inputs={}, - outputs={}, - session_id=TEST_SESSION_ID, - child_chain_runs=[], - child_tool_runs=[ - ToolRun( - uuid=str(tool_uuid), - parent_uuid=str(chain_uuid), - 
-                start_time=datetime.now(timezone.utc),
-                end_time=datetime.now(timezone.utc),
-                extra={},
-                execution_order=2,
-                child_execution_order=3,
-                serialized={"name": "tool"},
-                tool_input="test",
-                output="test",
-                action="{'name': 'tool'}",
-                session_id=TEST_SESSION_ID,
-                error=None,
-                child_chain_runs=[],
-                child_tool_runs=[],
-                child_llm_runs=[
-                    LLMRun(
-                        uuid=str(llm_uuid1),
-                        parent_uuid=str(tool_uuid),
-                        error=None,
-                        start_time=datetime.now(timezone.utc),
-                        end_time=datetime.now(timezone.utc),
-                        extra={},
-                        execution_order=3,
-                        child_execution_order=3,
-                        serialized=SERIALIZED,
-                        prompts=[],
-                        response=LLMResult(generations=[[]]),
-                        session_id=TEST_SESSION_ID,
-                    )
-                ],
-            ),
-        ],
-        child_llm_runs=[
-            LLMRun(
-                uuid=str(llm_uuid2),
-                parent_uuid=str(chain_uuid),
-                error=None,
-                start_time=datetime.now(timezone.utc),
-                end_time=datetime.now(timezone.utc),
-                extra={},
-                execution_order=4,
-                child_execution_order=4,
-                serialized=SERIALIZED,
-                prompts=[],
-                response=LLMResult(generations=[[]]),
-                session_id=TEST_SESSION_ID,
-            ),
-        ],
-    )
-    assert tracer.runs[0] == compare_run
-    assert tracer.runs == [compare_run] * 10
-
-
-@freeze_time("2023-01-01")
-def test_tracer_llm_run_on_error() -> None:
-    """Test tracer on an LLM run with an error."""
-    exception = Exception("test")
-    uuid = uuid4()
-
-    compare_run = LLMRun(
-        uuid=str(uuid),
-        parent_uuid=None,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        extra={},
-        execution_order=1,
-        child_execution_order=1,
-        serialized=SERIALIZED,
-        prompts=[],
-        response=None,
-        session_id=TEST_SESSION_ID,
-        error=repr(exception),
-    )
-    tracer = FakeTracer()
-
-    tracer.new_session()
-    tracer.on_llm_start(serialized=SERIALIZED, prompts=[], run_id=uuid)
-    tracer.on_llm_error(exception, run_id=uuid)
-    _compare_run_with_error(tracer.runs[0], compare_run)
-
-
-@freeze_time("2023-01-01")
-def test_tracer_chain_run_on_error() -> None:
-    """Test tracer on a Chain run with an error."""
-    exception = Exception("test")
-    uuid = uuid4()
-
-    compare_run = ChainRun(
-        uuid=str(uuid),
-        parent_uuid=None,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        extra={},
-        execution_order=1,
-        child_execution_order=1,
-        serialized={"name": "chain"},
-        inputs={},
-        outputs=None,
-        session_id=TEST_SESSION_ID,
-        error=repr(exception),
-    )
-    tracer = FakeTracer()
-
-    tracer.new_session()
-    tracer.on_chain_start(serialized={"name": "chain"}, inputs={}, run_id=uuid)
-    tracer.on_chain_error(exception, run_id=uuid)
-    _compare_run_with_error(tracer.runs[0], compare_run)
-
-
-@freeze_time("2023-01-01")
-def test_tracer_tool_run_on_error() -> None:
-    """Test tracer on a Tool run with an error."""
-    exception = Exception("test")
-    uuid = uuid4()
-
-    compare_run = ToolRun(
-        uuid=str(uuid),
-        parent_uuid=None,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        extra={},
-        execution_order=1,
-        child_execution_order=1,
-        serialized={"name": "tool"},
-        tool_input="test",
-        output=None,
-        action="{'name': 'tool'}",
-        session_id=TEST_SESSION_ID,
-        error=repr(exception),
-    )
-    tracer = FakeTracer()
-
-    tracer.new_session()
-    tracer.on_tool_start(serialized={"name": "tool"}, input_str="test", run_id=uuid)
-    tracer.on_tool_error(exception, run_id=uuid)
-    _compare_run_with_error(tracer.runs[0], compare_run)
-
-
-@pytest.fixture
-def sample_tracer_session_v1() -> TracerSessionV1:
-    return TracerSessionV1(id=2, name="Sample session")
-
-
-@freeze_time("2023-01-01")
-def test_convert_run(
-    lang_chain_tracer_v1: LangChainTracerV1,
-    sample_tracer_session_v1: TracerSessionV1,
-) -> None:
-    """Test converting a run to a V1 run."""
-    llm_run = Run(  # type: ignore[call-arg]
-        id="57a08cc4-73d2-4236-8370-549099d07fad",  # type: ignore[arg-type]
-        name="llm_run",
-        execution_order=1,
-        child_execution_order=1,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        session_id=TEST_SESSION_ID,
-        inputs={"prompts": []},
-        outputs=LLMResult(generations=[[]]).dict(),
-        serialized={},
-        extra={},
-        run_type="llm",
-    )
-    chain_run = Run(
-        id="57a08cc4-73d2-4236-8371-549099d07fad",  # type: ignore[arg-type]
-        name="chain_run",
-        execution_order=1,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        child_execution_order=1,
-        serialized={},
-        inputs={},
-        outputs={},
-        child_runs=[llm_run],
-        extra={},
-        run_type="chain",
-    )
-
-    tool_run = Run(
-        id="57a08cc4-73d2-4236-8372-549099d07fad",  # type: ignore[arg-type]
-        name="tool_run",
-        execution_order=1,
-        child_execution_order=1,
-        inputs={"input": "test"},
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        outputs=None,
-        serialized={},
-        child_runs=[],
-        extra={},
-        run_type="tool",
-    )
-
-    expected_llm_run = LLMRun(  # type: ignore[call-arg]
-        uuid="57a08cc4-73d2-4236-8370-549099d07fad",
-        name="llm_run",
-        execution_order=1,
-        child_execution_order=1,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        session_id=2,
-        prompts=[],
-        response=LLMResult(generations=[[]]),
-        serialized={},
-        extra={},
-    )
-
-    expected_chain_run = ChainRun(  # type: ignore[call-arg]
-        uuid="57a08cc4-73d2-4236-8371-549099d07fad",
-        name="chain_run",
-        execution_order=1,
-        child_execution_order=1,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        session_id=2,
-        serialized={},
-        inputs={},
-        outputs={},
-        child_llm_runs=[expected_llm_run],
-        child_chain_runs=[],
-        child_tool_runs=[],
-        extra={},
-    )
-    expected_tool_run = ToolRun(  # type: ignore[call-arg]
-        uuid="57a08cc4-73d2-4236-8372-549099d07fad",
-        name="tool_run",
-        execution_order=1,
-        child_execution_order=1,
-        session_id=2,
-        start_time=datetime.now(timezone.utc),
-        end_time=datetime.now(timezone.utc),
-        tool_input="test",
-        action="{}",
-        serialized={},
-        child_llm_runs=[],
-        child_chain_runs=[],
-        child_tool_runs=[],
-        extra={},
-    )
-    lang_chain_tracer_v1.session = sample_tracer_session_v1
-    converted_llm_run = lang_chain_tracer_v1._convert_to_v1_run(llm_run)
-    converted_chain_run = lang_chain_tracer_v1._convert_to_v1_run(chain_run)
-    converted_tool_run = lang_chain_tracer_v1._convert_to_v1_run(tool_run)
-
-    assert isinstance(converted_llm_run, LLMRun)
-    assert isinstance(converted_chain_run, ChainRun)
-    assert isinstance(converted_tool_run, ToolRun)
-    assert converted_llm_run == expected_llm_run
-    assert converted_tool_run == expected_tool_run
-    assert converted_chain_run == expected_chain_run
diff --git a/libs/core/tests/unit_tests/utils/test_loading.py b/libs/core/tests/unit_tests/utils/test_loading.py
deleted file mode 100644
index 89678f32dc..0000000000
--- a/libs/core/tests/unit_tests/utils/test_loading.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""Test the functionality of loading from langchain-hub."""
-
-import json
-import re
-from pathlib import Path
-from typing import Iterable
-from unittest.mock import Mock
-from urllib.parse import urljoin
-
-import pytest
-import responses
-
-from langchain_core.utils.loading import DEFAULT_REF, URL_BASE, try_load_from_hub
-
-
-@pytest.fixture(autouse=True)
-def mocked_responses() -> Iterable[responses.RequestsMock]:
-    """Fixture mocking requests.get."""
-    with responses.RequestsMock() as rsps:
-        yield rsps
-
-
-def test_non_hub_path() -> None:
-    """Test that a non-hub path returns None."""
-    path = "chains/some_path"
-    loader = Mock()
-    valid_suffixes = {"suffix"}
-    result = try_load_from_hub(path, loader, "chains", valid_suffixes)
-
-    assert result is None
-    loader.assert_not_called()
-
-
-def test_invalid_prefix() -> None:
-    """Test that a hub path with an invalid prefix returns None."""
-    path = "lc://agents/some_path"
-    loader = Mock()
-    valid_suffixes = {"suffix"}
-    result = try_load_from_hub(path, loader, "chains", valid_suffixes)
-
-    assert result is None
-    loader.assert_not_called()
-
-
-def test_invalid_suffix() -> None:
-    """Test that a hub path with an invalid suffix raises an error."""
-    path = "lc://chains/path.invalid"
-    loader = Mock()
-    valid_suffixes = {"json"}
-
-    with pytest.raises(
-        ValueError, match=f"Unsupported file type, must be one of {valid_suffixes}."
-    ):
-        try_load_from_hub(path, loader, "chains", valid_suffixes)
-
-    loader.assert_not_called()
-
-
-@pytest.mark.parametrize("ref", [None, "v0.3"])
-def test_success(mocked_responses: responses.RequestsMock, ref: str) -> None:
-    """Test that a valid hub path is loaded correctly with and without a ref."""
-    path = "chains/path/chain.json"
-    lc_path_prefix = f"lc{('@' + ref) if ref else ''}://"
-    valid_suffixes = {"json"}
-    body = json.dumps({"foo": "bar"})
-    ref = ref or DEFAULT_REF
-
-    file_contents = None
-
-    def loader(file_path: str) -> None:
-        nonlocal file_contents
-        assert file_contents is None
-        file_contents = Path(file_path).read_text()
-
-    mocked_responses.get(  # type: ignore
-        urljoin(URL_BASE.format(ref=ref), path),
-        body=body,
-        status=200,
-        content_type="application/json",
-    )
-
-    try_load_from_hub(f"{lc_path_prefix}{path}", loader, "chains", valid_suffixes)
-    assert file_contents == body
-
-
-def test_failed_request(mocked_responses: responses.RequestsMock) -> None:
-    """Test that a failed request raises an error."""
-    path = "chains/path/chain.json"
-    loader = Mock()
-
-    mocked_responses.get(  # type: ignore
-        urljoin(URL_BASE.format(ref=DEFAULT_REF), path), status=500
-    )
-
-    with pytest.raises(ValueError, match=re.compile("Could not find file at .*")):
-        try_load_from_hub(f"lc://{path}", loader, "chains", {"json"})
-    loader.assert_not_called()
-
-
-def test_path_traversal() -> None:
-    """Test that a path traversal attack is prevented."""
-    path = "lc://chains/../../../../../../../../../it.json"
-    loader = Mock()
-
-    with pytest.raises(ValueError):
-        try_load_from_hub(path, loader, "chains", {"json"})
diff --git a/libs/experimental/langchain_experimental/prompts/load.py b/libs/experimental/langchain_experimental/prompts/load.py
index 6cbe02edd6..77ab075760 100644
--- a/libs/experimental/langchain_experimental/prompts/load.py
+++ b/libs/experimental/langchain_experimental/prompts/load.py
@@ -1,52 +1,3 @@
-# Susceptible to arbitrary code execution: https://github.com/langchain-ai/langchain/issues/4849
-import importlib.util
-import json
-from pathlib import Path
-from typing import Union
+from langchain.prompts.loading import load_prompt
 
-import yaml
-from langchain.prompts.loading import load_prompt_from_config, try_load_from_hub
-from langchain_core.prompts import BasePromptTemplate
-
-
-def load_prompt(path: Union[str, Path]) -> BasePromptTemplate:
-    """Unified method for loading a prompt from LangChainHub or local file system."""
-
-    if hub_result := try_load_from_hub(
-        path, _load_prompt_from_file, "prompts", {"py", "json", "yaml"}
-    ):
-        return hub_result
-    else:
-        return _load_prompt_from_file(path)
-
-
-def _load_prompt_from_file(file: Union[str, Path]) -> BasePromptTemplate:
-    """Load prompt from file."""
-    # Convert file to a Path object.
-    if isinstance(file, str):
-        file_path = Path(file)
-    else:
-        file_path = file
-    # Load from either json or yaml.
-    if file_path.suffix == ".json":
-        with open(file_path) as f:
-            config = json.load(f)
-    elif file_path.suffix.endswith((".yaml", ".yml")):
-        with open(file_path, "r") as f:
-            config = yaml.safe_load(f)
-    elif file_path.suffix == ".py":
-        spec = importlib.util.spec_from_loader(
-            "prompt", loader=None, origin=str(file_path)
-        )
-        if spec is None:
-            raise ValueError("could not load spec")
-        helper = importlib.util.module_from_spec(spec)
-        with open(file_path, "rb") as f:
-            exec(f.read(), helper.__dict__)
-        if not isinstance(helper.PROMPT, BasePromptTemplate):
-            raise ValueError("Did not get object of type BasePromptTemplate.")
-        return helper.PROMPT
-    else:
-        raise ValueError(f"Got unsupported file type {file_path.suffix}")
-    # Load the prompt from the config now.
-    return load_prompt_from_config(config)
+__all__ = ["load_prompt"]
diff --git a/libs/langchain/langchain/agents/loading.py b/libs/langchain/langchain/agents/loading.py
index e1d9747df2..a5d15d24e7 100644
--- a/libs/langchain/langchain/agents/loading.py
+++ b/libs/langchain/langchain/agents/loading.py
@@ -1,4 +1,5 @@
 """Functionality for loading agents."""
+
 import json
 import logging
 from pathlib import Path
@@ -8,7 +9,6 @@ import yaml
 from langchain_core._api import deprecated
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.tools import Tool
-from langchain_core.utils.loading import try_load_from_hub
 
 from langchain.agents.agent import BaseMultiActionAgent, BaseSingleActionAgent
 from langchain.agents.types import AGENT_TO_CLASS
@@ -100,13 +100,13 @@ def load_agent(
     Returns:
         An agent executor.
     """
-    valid_suffixes = {"json", "yaml"}
-    if hub_result := try_load_from_hub(
-        path, _load_agent_from_file, "agents", valid_suffixes
-    ):
-        return hub_result
-    else:
-        return _load_agent_from_file(path, **kwargs)
+    if isinstance(path, str) and path.startswith("lc://"):
+        raise RuntimeError(
+            "Loading from the deprecated github-based Hub is no longer supported. "
+            "Please use the new LangChain Hub at https://smith.langchain.com/hub "
+            "instead."
+        )
+    return _load_agent_from_file(path, **kwargs)
 
 
 def _load_agent_from_file(
diff --git a/libs/langchain/langchain/chains/loading.py b/libs/langchain/langchain/chains/loading.py
index 32921145b1..2b7a3735b4 100644
--- a/libs/langchain/langchain/chains/loading.py
+++ b/libs/langchain/langchain/chains/loading.py
@@ -1,4 +1,5 @@
 """Functionality for loading chains."""
+
 import json
 from pathlib import Path
 from typing import Any, Union
@@ -10,7 +11,6 @@ from langchain_core.prompts.loading import (
     load_prompt,
     load_prompt_from_config,
 )
-from langchain_core.utils.loading import try_load_from_hub
 
 from langchain.chains import ReduceDocumentsChain
 from langchain.chains.api.base import APIChain
@@ -622,12 +622,13 @@ def load_chain_from_config(config: dict, **kwargs: Any) -> Chain:
 
 def load_chain(path: Union[str, Path], **kwargs: Any) -> Chain:
     """Unified method for loading a chain from LangChainHub or local fs."""
-    if hub_result := try_load_from_hub(
-        path, _load_chain_from_file, "chains", {"json", "yaml"}, **kwargs
-    ):
-        return hub_result
-    else:
-        return _load_chain_from_file(path, **kwargs)
+    if isinstance(path, str) and path.startswith("lc://"):
+        raise RuntimeError(
+            "Loading from the deprecated github-based Hub is no longer supported. "
+            "Please use the new LangChain Hub at https://smith.langchain.com/hub "
+            "instead."
+        )
+    return _load_chain_from_file(path, **kwargs)
 
 
 def _load_chain_from_file(file: Union[str, Path], **kwargs: Any) -> Chain: