Move runnable context to beta (#14507)

Author: Nuno Campos, 2023-12-11 13:58:30 -08:00 (committed by GitHub)
Commit: 3b5b0f16c6 (parent: ed58eeb9c5)
8 changed files with 16 additions and 15 deletions
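
This change moves the experimental Context runnable out of `langchain_core.runnables` and into the `langchain_core.beta.runnables.context` module, removing `Context` from the package's `__all__`. A minimal sketch of the import update implied by the hunks below (old and new paths are taken directly from the diff):

```python
# Before this commit, Context was exported from the runnables package:
# from langchain_core.runnables import Context
# from langchain_core.runnables.context import Context

# After this commit, it lives under the beta namespace:
from langchain_core.beta.runnables.context import Context
```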

View File

@@ -30,7 +30,6 @@ from langchain_core.runnables.config import (
     get_config_list,
     patch_config,
 )
-from langchain_core.runnables.context import Context
 from langchain_core.runnables.fallbacks import RunnableWithFallbacks
 from langchain_core.runnables.passthrough import RunnablePassthrough
 from langchain_core.runnables.router import RouterInput, RouterRunnable
@@ -48,7 +47,6 @@ __all__ = [
     "ConfigurableField",
     "ConfigurableFieldSingleOption",
     "ConfigurableFieldMultiOption",
-    "Context",
     "patch_config",
     "RouterInput",
     "RouterRunnable",

View File

@@ -1403,7 +1403,10 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
     @property
     def config_specs(self) -> List[ConfigurableFieldSpec]:
-        from langchain_core.runnables.context import CONTEXT_CONFIG_PREFIX, _key_from_id
+        from langchain_core.beta.runnables.context import (
+            CONTEXT_CONFIG_PREFIX,
+            _key_from_id,
+        )
 
         # get all specs
         all_specs = [
@@ -1495,7 +1498,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
         )
 
     def invoke(self, input: Input, config: Optional[RunnableConfig] = None) -> Output:
-        from langchain_core.runnables.context import config_with_context
+        from langchain_core.beta.runnables.context import config_with_context
 
         # setup callbacks and context
         config = config_with_context(ensure_config(config), self.steps)
@@ -1529,7 +1532,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
         config: Optional[RunnableConfig] = None,
         **kwargs: Optional[Any],
     ) -> Output:
-        from langchain_core.runnables.context import aconfig_with_context
+        from langchain_core.beta.runnables.context import aconfig_with_context
 
         # setup callbacks and context
         config = aconfig_with_context(ensure_config(config), self.steps)
@@ -1565,8 +1568,8 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
         return_exceptions: bool = False,
         **kwargs: Optional[Any],
     ) -> List[Output]:
+        from langchain_core.beta.runnables.context import config_with_context
         from langchain_core.callbacks.manager import CallbackManager
-        from langchain_core.runnables.context import config_with_context
 
         if not inputs:
             return []
@@ -1688,8 +1691,8 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
         return_exceptions: bool = False,
         **kwargs: Optional[Any],
     ) -> List[Output]:
+        from langchain_core.beta.runnables.context import aconfig_with_context
         from langchain_core.callbacks.manager import AsyncCallbackManager
-        from langchain_core.runnables.context import aconfig_with_context
 
         if not inputs:
             return []
@@ -1812,7 +1815,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
         run_manager: CallbackManagerForChainRun,
         config: RunnableConfig,
     ) -> Iterator[Output]:
-        from langchain_core.runnables.context import config_with_context
+        from langchain_core.beta.runnables.context import config_with_context
 
         steps = [self.first] + self.middle + [self.last]
         config = config_with_context(config, self.steps)
@@ -1839,7 +1842,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
         run_manager: AsyncCallbackManagerForChainRun,
         config: RunnableConfig,
     ) -> AsyncIterator[Output]:
-        from langchain_core.runnables.context import aconfig_with_context
+        from langchain_core.beta.runnables.context import aconfig_with_context
 
         steps = [self.first] + self.middle + [self.last]
         config = aconfig_with_context(config, self.steps)

View File

@@ -26,10 +26,6 @@ from langchain_core.runnables.config import (
     get_callback_manager_for_config,
     patch_config,
 )
-from langchain_core.runnables.context import (
-    CONTEXT_CONFIG_PREFIX,
-    CONTEXT_CONFIG_SUFFIX_SET,
-)
 from langchain_core.runnables.utils import (
     ConfigurableFieldSpec,
     Input,
@@ -152,6 +148,11 @@ class RunnableBranch(RunnableSerializable[Input, Output]):
     @property
     def config_specs(self) -> List[ConfigurableFieldSpec]:
+        from langchain_core.beta.runnables.context import (
+            CONTEXT_CONFIG_PREFIX,
+            CONTEXT_CONFIG_SUFFIX_SET,
+        )
+
         specs = get_unique_config_specs(
             spec
             for step in (

View File

@@ -2,11 +2,11 @@ from typing import Any, Callable, List, NamedTuple, Union
 
 import pytest
 
+from langchain_core.beta.runnables.context import Context
 from langchain_core.output_parsers.string import StrOutputParser
 from langchain_core.prompt_values import StringPromptValue
 from langchain_core.prompts.prompt import PromptTemplate
 from langchain_core.runnables.base import Runnable, RunnableLambda
-from langchain_core.runnables.context import Context
 from langchain_core.runnables.passthrough import RunnablePassthrough
 from langchain_core.runnables.utils import aadd, add
 from tests.unit_tests.fake.llm import FakeListLLM, FakeStreamingListLLM
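
The test module above exercises Context through its setter/getter pair. A minimal usage sketch under the new import path; the `Context.setter`/`Context.getter` classmethods are part of the existing Context API and are assumed here rather than shown in this diff:

```python
from langchain_core.beta.runnables.context import Context
from langchain_core.runnables import RunnableLambda, RunnablePassthrough

# Record the original input under the "input" key, transform it, then read
# the recorded value back at the end of the same sequence.
chain = (
    Context.setter("input")
    | RunnableLambda(lambda text: text.upper())
    | {"upper": RunnablePassthrough(), "input": Context.getter("input")}
)

# Expected output, assuming the setter/getter behavior exercised by the tests:
# {"upper": "HELLO", "input": "hello"}
print(chain.invoke("hello"))
```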

View File

@@ -2,7 +2,6 @@ from langchain_core.runnables import __all__
 
 EXPECTED_ALL = [
     "AddableDict",
-    "Context",
     "ConfigurableField",
     "ConfigurableFieldSingleOption",
     "ConfigurableFieldMultiOption",