core[patch]: update LanguageModelInput from List to Sequence (#14405)

Co-authored-by: Erick Friis <erick@langchain.dev>
Authored by Li-Lun Lin on 2024-01-03 10:49:01 +08:00; committed by GitHub
parent 4dab37741a
commit cda68d717c
3 changed files with 3 additions and 3 deletions


@@ -48,7 +48,7 @@ def _get_token_ids_default_method(text: str) -> List[int]:
     return tokenizer.encode(text)
 
 
-LanguageModelInput = Union[PromptValue, str, List[BaseMessage]]
+LanguageModelInput = Union[PromptValue, str, Sequence[BaseMessage]]
 LanguageModelOutput = Union[BaseMessage, str]
 LanguageModelLike = Runnable[LanguageModelInput, LanguageModelOutput]
 LanguageModelOutputVar = TypeVar("LanguageModelOutputVar", BaseMessage, str)
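
The effect of widening the alias is that any sequence of messages, not only a list, now satisfies LanguageModelInput. A minimal before/after sketch of the typing change, using stand-in classes rather than the real langchain_core types:

from typing import List, Sequence, Union

# Stand-ins for the real langchain_core classes; illustrative only.
class PromptValue: ...

class BaseMessage:
    def __init__(self, content: str) -> None:
        self.content = content

# Before this patch: only a list of messages satisfied the alias.
OldLanguageModelInput = Union[PromptValue, str, List[BaseMessage]]

# After this patch: any Sequence of messages does, e.g. a tuple.
NewLanguageModelInput = Union[PromptValue, str, Sequence[BaseMessage]]

msgs: NewLanguageModelInput = (BaseMessage("hi"), BaseMessage("there"))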


@@ -142,7 +142,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
             return input
         elif isinstance(input, str):
            return StringPromptValue(text=input)
-        elif isinstance(input, list):
+        elif isinstance(input, Sequence):
             return ChatPromptValue(messages=input)
         else:
             raise ValueError(
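
One subtlety in this dispatch: a str is itself a Sequence, so the str branch must be checked before the new Sequence branch, otherwise a plain string would be wrapped as a sequence of messages. A small sketch of that branch order, with simplified return values standing in for StringPromptValue and ChatPromptValue (not the library code):

from collections.abc import Sequence

def convert(input):
    # Mirrors the branch order above: str first, then any other Sequence.
    if isinstance(input, str):
        return ("string_prompt", input)
    elif isinstance(input, Sequence):
        # With the patch, tuples and other sequences are accepted here,
        # not just lists.
        return ("chat_prompt", list(input))
    else:
        raise ValueError(f"Invalid input type {type(input)}.")

print(convert("hi"))              # ('string_prompt', 'hi')
print(convert(("msg1", "msg2")))  # ('chat_prompt', ['msg1', 'msg2'])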


@@ -205,7 +205,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             return input
         elif isinstance(input, str):
            return StringPromptValue(text=input)
-        elif isinstance(input, list):
+        elif isinstance(input, Sequence):
             return ChatPromptValue(messages=input)
         else:
             raise ValueError(