From c5b50be225e9d2139254bd15e1bf5aa79aa6ef97 Mon Sep 17 00:00:00 2001
From: William FH <13333726+hinthornw@users.noreply.github.com>
Date: Sun, 23 Jul 2023 18:01:33 -0700
Subject: [PATCH] Function calling logging fixup (#8153)

Fix bad overwriting of the "functions" arg in the invocation params.
Clean up precedence in the merged params dict.
Clean up some inappropriate types (Mapping should be Dict).

Example: https://dev.smith.langchain.com/public/9a7a6817-1679-49d8-8775-c13916975aae/r

![image](https://github.com/langchain-ai/langchain/assets/13333726/94cd0775-b6ef-40c3-9e5a-3ab65e466ab9)
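For reviewers, here is a minimal standalone sketch (illustrative names and values, not code from this patch) of the merge-order behavior the fix relies on: in a dict literal, later entries override earlier ones, so spreading `**kwargs` last lets call-time arguments such as `functions` take precedence and be logged under their real key.

```python
# Illustration only: later entries in a dict literal override earlier ones.
defaults = {"model": "gpt-3.5-turbo", "temperature": 0.7}
kwargs = {"functions": [{"name": "get_weather"}], "temperature": 0.2}

# Old-style merge: call-time kwargs were collapsed into a single,
# mis-named "function" key and could not override the defaults.
old = {**defaults, "function": kwargs.get("functions")}

# New-style merge: kwargs are spread last, so they win on conflicts
# and "functions" is recorded under its real name.
new = {**defaults, **kwargs}

print(old)  # {'model': 'gpt-3.5-turbo', 'temperature': 0.7, 'function': [{'name': 'get_weather'}]}
print(new)  # {'model': 'gpt-3.5-turbo', 'temperature': 0.2, 'functions': [{'name': 'get_weather'}]}
```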
---
 .../langchain/chat_models/azure_openai.py         |  6 +++---
 libs/langchain/langchain/chat_models/base.py      |  4 ++--
 libs/langchain/langchain/chat_models/fake.py      |  4 ++--
 .../langchain/chat_models/google_palm.py          |  6 ++----
 libs/langchain/langchain/chat_models/human.py     |  4 ++--
 libs/langchain/langchain/chat_models/openai.py    | 16 ++++++++--------
 .../langchain/chat_models/promptlayer_openai.py   |  4 ++--
 .../tests/unit_tests/agents/test_agent.py         |  4 ++--
 .../tests/unit_tests/chains/test_natbot.py        |  4 ++--
 .../tests/unit_tests/llms/fake_chat_model.py      |  4 ++--
 libs/langchain/tests/unit_tests/llms/fake_llm.py  |  4 ++--
 11 files changed, 29 insertions(+), 31 deletions(-)

diff --git a/libs/langchain/langchain/chat_models/azure_openai.py b/libs/langchain/langchain/chat_models/azure_openai.py
index 91334d188a..31095fe518 100644
--- a/libs/langchain/langchain/chat_models/azure_openai.py
+++ b/libs/langchain/langchain/chat_models/azure_openai.py
@@ -116,18 +116,18 @@ class AzureChatOpenAI(ChatOpenAI):
         }
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {**self._default_params}
 
     @property
     def _client_params(self) -> Dict[str, Any]:
         """Get the config params used for the openai client."""
-        openai_creds = {
+        return {
+            **super()._client_params,
             "api_type": self.openai_api_type,
             "api_version": self.openai_api_version,
         }
-        return {**super()._client_params, **openai_creds}
 
     @property
     def _llm_type(self) -> str:
diff --git a/libs/langchain/langchain/chat_models/base.py b/libs/langchain/langchain/chat_models/base.py
index 239ae5689a..a7844e38c4 100644
--- a/libs/langchain/langchain/chat_models/base.py
+++ b/libs/langchain/langchain/chat_models/base.py
@@ -3,7 +3,7 @@ import inspect
 import warnings
 from abc import ABC, abstractmethod
 from functools import partial
-from typing import Any, Dict, List, Mapping, Optional, Sequence
+from typing import Any, Dict, List, Optional, Sequence
 
 from pydantic import Field, root_validator
 
@@ -424,7 +424,7 @@ class BaseChatModel(BaseLanguageModel, ABC):
         return await self._call_async(messages, stop=_stop, **kwargs)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {}
 
diff --git a/libs/langchain/langchain/chat_models/fake.py b/libs/langchain/langchain/chat_models/fake.py
index a974fe592a..5f370c9114 100644
--- a/libs/langchain/langchain/chat_models/fake.py
+++ b/libs/langchain/langchain/chat_models/fake.py
@@ -1,5 +1,5 @@
 """Fake ChatModel for testing purposes."""
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import CallbackManagerForLLMRun
 from langchain.chat_models.base import SimpleChatModel
@@ -29,5 +29,5 @@ class FakeListChatModel(SimpleChatModel):
         return response
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {"responses": self.responses}
diff --git a/libs/langchain/langchain/chat_models/google_palm.py b/libs/langchain/langchain/chat_models/google_palm.py
index 8a3780ce9a..e180030e74 100644
--- a/libs/langchain/langchain/chat_models/google_palm.py
+++ b/libs/langchain/langchain/chat_models/google_palm.py
@@ -2,7 +2,7 @@
 from __future__ import annotations
 
 import logging
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
 
 from pydantic import BaseModel, root_validator
 from tenacity import (
@@ -40,8 +40,6 @@ logger = logging.getLogger(__name__)
 class ChatGooglePalmError(Exception):
     """Error raised when there is an issue with the Google PaLM API."""
 
-    pass
-
 
 def _truncate_at_stop_tokens(
     text: str,
@@ -323,7 +321,7 @@ class ChatGooglePalm(BaseChatModel, BaseModel):
         return _response_to_result(response, stop)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {
             "model_name": self.model_name,
diff --git a/libs/langchain/langchain/chat_models/human.py b/libs/langchain/langchain/chat_models/human.py
index 80721aacac..20fb099c20 100644
--- a/libs/langchain/langchain/chat_models/human.py
+++ b/libs/langchain/langchain/chat_models/human.py
@@ -2,7 +2,7 @@
 import asyncio
 from functools import partial
 from io import StringIO
-from typing import Any, Callable, List, Mapping, Optional
+from typing import Any, Callable, Dict, List, Mapping, Optional
 
 import yaml
 from pydantic import Field
@@ -76,7 +76,7 @@ class HumanInputChatModel(BaseChatModel):
     message_kwargs: Mapping[str, Any] = {}
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {
             "input_func": self.input_func.__name__,
             "message_func": self.message_func.__name__,
diff --git a/libs/langchain/langchain/chat_models/openai.py b/libs/langchain/langchain/chat_models/openai.py
index 28d59182c1..a3071b8d6b 100644
--- a/libs/langchain/langchain/chat_models/openai.py
+++ b/libs/langchain/langchain/chat_models/openai.py
@@ -355,7 +355,7 @@ class ChatOpenAI(BaseChatModel):
     def _create_message_dicts(
         self, messages: List[BaseMessage], stop: Optional[List[str]]
     ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
-        params = dict(self._client_params)
+        params = self._client_params
         if stop is not None:
             if "stop" in params:
                 raise ValueError("`stop` found in both the input and default params.")
@@ -419,12 +419,12 @@ class ChatOpenAI(BaseChatModel):
         return self._create_chat_result(response)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         """Get the identifying parameters."""
         return {**{"model_name": self.model_name}, **self._default_params}
 
     @property
-    def _client_params(self) -> Mapping[str, Any]:
+    def _client_params(self) -> Dict[str, Any]:
         """Get the parameters used for the openai client."""
         openai_creds: Dict[str, Any] = {
             "api_key": self.openai_api_key,
@@ -436,17 +436,17 @@ class ChatOpenAI(BaseChatModel):
             import openai
 
             openai.proxy = {"http": self.openai_proxy, "https": self.openai_proxy}  # type: ignore[assignment]  # noqa: E501
-        return {**openai_creds, **self._default_params}
+        return {**self._default_params, **openai_creds}
 
     def _get_invocation_params(
         self, stop: Optional[List[str]] = None, **kwargs: Any
     ) -> Dict[str, Any]:
-        """Get the parameters used to invoke the model FOR THE CALLBACKS."""
+        """Get the parameters used to invoke the model."""
         return {
-            **super()._get_invocation_params(stop=stop, **kwargs),
-            **self._default_params,
             "model": self.model_name,
-            "function": kwargs.get("functions"),
+            **super()._get_invocation_params(stop=stop),
+            **self._default_params,
+            **kwargs,
         }
 
     @property
diff --git a/libs/langchain/langchain/chat_models/promptlayer_openai.py b/libs/langchain/langchain/chat_models/promptlayer_openai.py
index fd73674336..2780888d85 100644
--- a/libs/langchain/langchain/chat_models/promptlayer_openai.py
+++ b/libs/langchain/langchain/chat_models/promptlayer_openai.py
@@ -1,6 +1,6 @@
 """PromptLayer wrapper."""
 import datetime
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import (
     AsyncCallbackManagerForLLMRun,
@@ -121,7 +121,7 @@ class PromptLayerChatOpenAI(ChatOpenAI):
         return "promptlayer-openai-chat"
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {
             **super()._identifying_params,
             "pl_tags": self.pl_tags,
diff --git a/libs/langchain/tests/unit_tests/agents/test_agent.py b/libs/langchain/tests/unit_tests/agents/test_agent.py
index 2405da88f9..cc4ab557c4 100644
--- a/libs/langchain/tests/unit_tests/agents/test_agent.py
+++ b/libs/langchain/tests/unit_tests/agents/test_agent.py
@@ -1,6 +1,6 @@
 """Unit tests for agents."""
 
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.agents import AgentExecutor, AgentType, initialize_agent
 from langchain.agents.tools import Tool
@@ -36,7 +36,7 @@ class FakeListLLM(LLM):
         return self._call(*args, **kwargs)
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {}
 
     @property
diff --git a/libs/langchain/tests/unit_tests/chains/test_natbot.py b/libs/langchain/tests/unit_tests/chains/test_natbot.py
index 0cc5a64970..1aaa0ee1cd 100644
--- a/libs/langchain/tests/unit_tests/chains/test_natbot.py
+++ b/libs/langchain/tests/unit_tests/chains/test_natbot.py
@@ -1,6 +1,6 @@
 """Test functionality related to natbot."""
 
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import CallbackManagerForLLMRun
 from langchain.chains.natbot.base import NatBotChain
@@ -32,7 +32,7 @@ class FakeLLM(LLM):
         return len(text.split())
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {}
 
 
diff --git a/libs/langchain/tests/unit_tests/llms/fake_chat_model.py b/libs/langchain/tests/unit_tests/llms/fake_chat_model.py
index 5b8218720d..8c64574d86 100644
--- a/libs/langchain/tests/unit_tests/llms/fake_chat_model.py
+++ b/libs/langchain/tests/unit_tests/llms/fake_chat_model.py
@@ -1,5 +1,5 @@
 """Fake Chat Model wrapper for testing purposes."""
-from typing import Any, List, Mapping, Optional
+from typing import Any, Dict, List, Optional
 
 from langchain.callbacks.manager import (
     AsyncCallbackManagerForLLMRun,
@@ -39,5 +39,5 @@ class FakeChatModel(SimpleChatModel):
         return "fake-chat-model"
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {"key": "fake"}
diff --git a/libs/langchain/tests/unit_tests/llms/fake_llm.py b/libs/langchain/tests/unit_tests/llms/fake_llm.py
index 33e88d4670..e7fc0d6dec 100644
--- a/libs/langchain/tests/unit_tests/llms/fake_llm.py
+++ b/libs/langchain/tests/unit_tests/llms/fake_llm.py
@@ -1,5 +1,5 @@
 """Fake LLM wrapper for testing purposes."""
-from typing import Any, List, Mapping, Optional, cast
+from typing import Any, Dict, List, Mapping, Optional, cast
 
 from pydantic import validator
 
@@ -51,7 +51,7 @@ class FakeLLM(LLM):
         return "bar"
 
     @property
-    def _identifying_params(self) -> Mapping[str, Any]:
+    def _identifying_params(self) -> Dict[str, Any]:
         return {}
 
     @property