From c9d4d53545dd70c84046b031f771a28a4acb0fc1 Mon Sep 17 00:00:00 2001
From: Tarun Thotakura <39567014+thundersaiyan@users.noreply.github.com>
Date: Wed, 11 Oct 2023 08:59:24 +0530
Subject: [PATCH] Fixed the assignment of custom_llm_provider argument
 (#11628)

- **Description:** Assign `custom_llm_provider` in the default params dict so
  that it is passed through to litellm.
- **Issue:** Although the `custom_llm_provider` argument is defined, it is
  never assigned anywhere in the code, so it is not passed to litellm. As a
  result, any litellm call that requires `custom_llm_provider` fails. litellm
  uses this parameter mainly for inference via a custom API server:
  https://docs.litellm.ai/docs/providers/custom_openai_proxy
- **Dependencies:** None.

@krrishdholakia , @baskaryan

---------

Co-authored-by: Bagatur
---
 libs/langchain/langchain/chat_models/litellm.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/libs/langchain/langchain/chat_models/litellm.py b/libs/langchain/langchain/chat_models/litellm.py
index 1a35f1d47d..a499ece4f7 100644
--- a/libs/langchain/langchain/chat_models/litellm.py
+++ b/libs/langchain/langchain/chat_models/litellm.py
@@ -207,6 +207,7 @@ class ChatLiteLLM(BaseChatModel):
             "stream": self.streaming,
             "n": self.n,
             "temperature": self.temperature,
+            "custom_llm_provider": self.custom_llm_provider,
             **self.model_kwargs,
         }
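
For reviewers, a minimal usage sketch (not part of the patch) of the path this
fix enables. The proxy URL and model name are placeholder assumptions, loosely
following litellm's custom OpenAI proxy docs linked above:

```python
# Sketch only: endpoint and model name below are hypothetical placeholders,
# assuming an OpenAI-compatible proxy per
# https://docs.litellm.ai/docs/providers/custom_openai_proxy
from langchain.chat_models import ChatLiteLLM
from langchain.schema import HumanMessage

chat = ChatLiteLLM(
    model="my-model",                             # placeholder model name
    api_base="https://openai-proxy.example.com",  # placeholder proxy URL
    custom_llm_provider="openai",  # with this fix, forwarded to litellm
)

# Before this fix, custom_llm_provider never reached litellm, so calls that
# require it (e.g. routing to a custom API server) failed.
print(chat([HumanMessage(content="Hello!")]))
```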