From c81c77b465afb900acd6cf47f6f288542141b125 Mon Sep 17 00:00:00 2001 From: blaufink <88979818+blaufink@users.noreply.github.com> Date: Fri, 9 Aug 2024 15:21:37 +0200 Subject: [PATCH] partners: fix of issue #24880 (#25229) - Description: As described in the related issue: There is an error occurring when using langchain-openai>=0.1.17 which can be attributed to the following PR: #23691 Here, the parameter logprobs is added to requests per default. However, AzureOpenAI takes issue with this parameter as stated here: https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/chatgpt?tabs=python-new&pivots=programming-language-chat-completions -> "If you set any of these parameters, you get an error." Therefore, this PR changes the default value of logprobs parameter to None instead of False. This results in it being filtered before the request is sent. - Issue: #24880 - Dependencies: / Co-authored-by: blaufink --- libs/partners/openai/langchain_openai/chat_models/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index f9f6841aaf..cd341a80c5 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -332,7 +332,7 @@ class BaseChatOpenAI(BaseChatModel): """Penalizes repeated tokens according to frequency.""" seed: Optional[int] = None """Seed for generation""" - logprobs: Optional[bool] = False + logprobs: Optional[bool] = None """Whether to return logprobs.""" top_logprobs: Optional[int] = None """Number of most likely tokens to return at each token position, each with