|
|
|
@ -1,4 +1,5 @@
|
|
|
|
|
"""Base interface for large language models to expose."""
|
|
|
|
|
|
|
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
|
|
import asyncio
|
|
|
|
@ -16,7 +17,6 @@ from typing import (
|
|
|
|
|
Dict,
|
|
|
|
|
Iterator,
|
|
|
|
|
List,
|
|
|
|
|
Mapping,
|
|
|
|
|
Optional,
|
|
|
|
|
Sequence,
|
|
|
|
|
Tuple,
|
|
|
|
@ -56,19 +56,13 @@ from langchain_core.messages import (
|
|
|
|
|
)
|
|
|
|
|
from langchain_core.outputs import Generation, GenerationChunk, LLMResult, RunInfo
|
|
|
|
|
from langchain_core.prompt_values import ChatPromptValue, PromptValue, StringPromptValue
|
|
|
|
|
from langchain_core.pydantic_v1 import Field, root_validator, validator
|
|
|
|
|
from langchain_core.pydantic_v1 import Field, root_validator
|
|
|
|
|
from langchain_core.runnables import RunnableConfig, ensure_config, get_config_list
|
|
|
|
|
from langchain_core.runnables.config import run_in_executor
|
|
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_verbosity() -> bool:
    """Return the process-wide verbosity flag.

    The import is deferred to call time to avoid a circular import at
    module load.
    """
    from langchain_core.globals import get_verbose

    verbose = get_verbose()
    return verbose
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@functools.lru_cache
|
|
|
|
|
def _log_error_once(msg: str) -> None:
|
|
|
|
|
"""Log an error once."""
|
|
|
|
@ -200,16 +194,6 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|
|
|
|
|
|
|
|
|
It should take in a prompt and return a string."""
|
|
|
|
|
|
|
|
|
|
# --- Configuration fields shared by all LLM implementations ---
cache: Optional[bool] = None
"""Whether to cache the response."""
verbose: bool = Field(default_factory=_get_verbosity)
"""Whether to print out response text."""
# Callbacks are excluded from serialization (exclude=True) — they hold
# live handler objects, not model configuration.
callbacks: Callbacks = Field(default=None, exclude=True)
"""Callbacks to add to the run trace."""
tags: Optional[List[str]] = Field(default=None, exclude=True)
"""Tags to add to the run trace."""
metadata: Optional[Dict[str, Any]] = Field(default=None, exclude=True)
"""Metadata to add to the run trace."""
# NOTE(review): deprecated in favor of `callbacks` — a validator elsewhere
# in this file pops this value into values["callbacks"]; confirm before
# relying on it.
callback_manager: Optional[BaseCallbackManager] = Field(default=None, exclude=True)
"""[DEPRECATED]"""
|
|
|
|
|
|
|
|
|
@ -229,17 +213,6 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|
|
|
|
values["callbacks"] = values.pop("callback_manager", None)
|
|
|
|
|
return values
|
|
|
|
|
|
|
|
|
|
@validator("verbose", pre=True, always=True)
def set_verbose(cls, verbose: Optional[bool]) -> bool:
    """If verbose is None, set it.

    This allows users to pass in None as verbose to access the global setting.
    """
    # None means "defer to the global verbosity flag"; any explicit
    # boolean wins.
    return _get_verbosity() if verbose is None else verbose
|
|
|
|
|
|
|
|
|
|
# --- Runnable methods ---
|
|
|
|
|
|
|
|
|
|
@property
|
|
|
|
@ -1081,11 +1054,6 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|
|
|
|
content = await self._call_async(text, stop=_stop, **kwargs)
|
|
|
|
|
return AIMessage(content=content)
|
|
|
|
|
|
|
|
|
|
@property
|
|
|
|
|
def _identifying_params(self) -> Mapping[str, Any]:
|
|
|
|
|
"""Get the identifying parameters."""
|
|
|
|
|
return {}
|
|
|
|
|
|
|
|
|
|
def __str__(self) -> str:
|
|
|
|
|
"""Get a string representation of the object for printing."""
|
|
|
|
|
cls_name = f"\033[1m{self.__class__.__name__}\033[0m"
|
|
|
|
|