From 96b47e18e04c6fab7b8daf56e75fc4540503f8ee Mon Sep 17 00:00:00 2001
From: Bagatur <22008038+baskaryan@users.noreply.github.com>
Date: Fri, 5 Jan 2024 18:24:11 -0500
Subject: [PATCH] core[patch]: Release 0.1.7 (#15610)

---
 libs/core/langchain_core/language_models/base.py  |  8 ++++----
 .../langchain_core/language_models/chat_models.py | 12 ++++++------
 libs/core/langchain_core/language_models/llms.py  |  9 +++++----
 libs/core/pyproject.toml                          |  2 +-
 4 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/libs/core/langchain_core/language_models/base.py b/libs/core/langchain_core/language_models/base.py
index 1676b74a32..577b277d5a 100644
--- a/libs/core/langchain_core/language_models/base.py
+++ b/libs/core/langchain_core/language_models/base.py
@@ -150,7 +150,7 @@ class BaseLanguageModel(
         prompt and additional model provider-specific output.
         """

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     @abstractmethod
     def predict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
@@ -171,7 +171,7 @@ class BaseLanguageModel(
             Top model prediction as a string.
         """

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     @abstractmethod
     def predict_messages(
         self,
@@ -196,7 +196,7 @@ class BaseLanguageModel(
             Top model prediction as a message.
         """

-    @deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
     @abstractmethod
     async def apredict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
@@ -217,7 +217,7 @@ class BaseLanguageModel(
             Top model prediction as a string.
         """

-    @deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
     @abstractmethod
     async def apredict_messages(
         self,
diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py
index 0dbac87fee..72320e2cb2 100644
--- a/libs/core/langchain_core/language_models/chat_models.py
+++ b/libs/core/langchain_core/language_models/chat_models.py
@@ -679,7 +679,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
     ) -> AsyncIterator[ChatGenerationChunk]:
         raise NotImplementedError()

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     def __call__(
         self,
         messages: List[BaseMessage],
@@ -711,13 +711,13 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         else:
             raise ValueError("Unexpected generation type")

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     def call_as_llm(
         self, message: str, stop: Optional[List[str]] = None, **kwargs: Any
     ) -> str:
         return self.predict(message, stop=stop, **kwargs)

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     def predict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
@@ -731,7 +731,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         else:
             raise ValueError("Cannot use predict when output is not a string.")

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     def predict_messages(
         self,
         messages: List[BaseMessage],
@@ -745,7 +745,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
             _stop = list(stop)
         return self(messages, stop=_stop, **kwargs)

-    @deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
     async def apredict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
@@ -761,7 +761,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         else:
             raise ValueError("Cannot use predict when output is not a string.")

-    @deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
     async def apredict_messages(
         self,
         messages: List[BaseMessage],
diff --git a/libs/core/langchain_core/language_models/llms.py b/libs/core/langchain_core/language_models/llms.py
index 007437070e..2b07c84027 100644
--- a/libs/core/langchain_core/language_models/llms.py
+++ b/libs/core/langchain_core/language_models/llms.py
@@ -926,6 +926,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
         generations = [existing_prompts[i] for i in range(len(prompts))]
         return LLMResult(generations=generations, llm_output=llm_output, run=run_info)

+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     def __call__(
         self,
         prompt: str,
@@ -977,7 +978,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             )
             return result.generations[0][0].text

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     def predict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
@@ -987,7 +988,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             _stop = list(stop)
         return self(text, stop=_stop, **kwargs)

-    @deprecated("0.1.0", alternative="invoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="invoke", removal="0.2.0")
     def predict_messages(
         self,
         messages: List[BaseMessage],
@@ -1003,7 +1004,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             content = self(text, stop=_stop, **kwargs)
         return AIMessage(content=content)

-    @deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
     async def apredict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
@@ -1013,7 +1014,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
             _stop = list(stop)
         return await self._call_async(text, stop=_stop, **kwargs)

-    @deprecated("0.1.0", alternative="ainvoke", removal="0.2.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
     async def apredict_messages(
         self,
         messages: List[BaseMessage],
diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml
index c14e1ae798..946e2812de 100644
--- a/libs/core/pyproject.toml
+++ b/libs/core/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain-core"
-version = "0.1.6"
+version = "0.1.7"
 description = "Building applications with LLMs through composability"
 authors = []
 license = "MIT"
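
Note (not part of the patch): a minimal before/after sketch of the migration these deprecation markers point to, assuming langchain-core 0.1.7. FakeListLLM is used here purely as a stand-in model for illustration; any BaseLLM or BaseChatModel instance behaves the same way.

    # Illustrative sketch only -- assumes langchain-core 0.1.7 and its FakeListLLM test helper.
    from langchain_core.language_models.fake import FakeListLLM

    llm = FakeListLLM(responses=["hello"])

    # Deprecated since 0.1.7, slated for removal in 0.2.0 (now emits a deprecation warning):
    old_call = llm("Say hi")          # BaseLLM.__call__
    old_pred = llm.predict("Say hi")  # BaseLLM.predict

    # Preferred Runnable-style replacement:
    new_text = llm.invoke("Say hi")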