From cda68d717c14215fdacfee9394dccd5dc43e8fbd Mon Sep 17 00:00:00 2001
From: Li-Lun Lin <70696274+alan910127@users.noreply.github.com>
Date: Wed, 3 Jan 2024 10:49:01 +0800
Subject: [PATCH] core[patch]: update LanguageModelInput from List to Sequence
 (#14405)

Co-authored-by: Erick Friis
---
 libs/core/langchain_core/language_models/base.py        | 2 +-
 libs/core/langchain_core/language_models/chat_models.py | 2 +-
 libs/core/langchain_core/language_models/llms.py        | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/libs/core/langchain_core/language_models/base.py b/libs/core/langchain_core/language_models/base.py
index b590e0df84..ef9d129967 100644
--- a/libs/core/langchain_core/language_models/base.py
+++ b/libs/core/langchain_core/language_models/base.py
@@ -48,7 +48,7 @@ def _get_token_ids_default_method(text: str) -> List[int]:
     return tokenizer.encode(text)
 
 
-LanguageModelInput = Union[PromptValue, str, List[BaseMessage]]
+LanguageModelInput = Union[PromptValue, str, Sequence[BaseMessage]]
 LanguageModelOutput = Union[BaseMessage, str]
 LanguageModelLike = Runnable[LanguageModelInput, LanguageModelOutput]
 LanguageModelOutputVar = TypeVar("LanguageModelOutputVar", BaseMessage, str)
diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py
index 047908f06e..c833ebede8 100644
--- a/libs/core/langchain_core/language_models/chat_models.py
+++ b/libs/core/langchain_core/language_models/chat_models.py
@@ -142,7 +142,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
             return input
         elif isinstance(input, str):
             return StringPromptValue(text=input)
-        elif isinstance(input, list):
+        elif isinstance(input, Sequence):
             return ChatPromptValue(messages=input)
         else:
             raise ValueError(
diff --git a/libs/core/langchain_core/language_models/llms.py b/libs/core/langchain_core/language_models/llms.py
index 4ecfc93521..228a3814f1 100644
--- a/libs/core/langchain_core/language_models/llms.py
+++ b/libs/core/langchain_core/language_models/llms.py
@@ -205,7 +205,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             return input
         elif isinstance(input, str):
             return StringPromptValue(text=input)
-        elif isinstance(input, list):
+        elif isinstance(input, Sequence):
             return ChatPromptValue(messages=input)
         else:
             raise ValueError(
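
Note (not part of the patch): a minimal sketch of what the widened type admits, assuming langchain_core is installed; the message contents below are made up for illustration. After this change any Sequence of BaseMessage (for example a tuple, not just a list) is a valid LanguageModelInput, and the patched isinstance(input, Sequence) branches wrap it in a ChatPromptValue before generation.

from typing import Sequence

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.prompt_values import ChatPromptValue

# A tuple of messages: this satisfies Sequence[BaseMessage] but not
# List[BaseMessage], so it only type-checks as LanguageModelInput
# after the List -> Sequence change.
history: Sequence[BaseMessage] = (
    HumanMessage(content="What is the capital of France?"),
    AIMessage(content="Paris."),
    HumanMessage(content="And of Germany?"),
)

# Roughly the same wrapping the edited branches in BaseChatModel/BaseLLM
# perform on a sequence of messages.
prompt = ChatPromptValue(messages=list(history))
print(prompt.to_string())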