From 1a4ca3eff957c6b3a8523148f84b2dbf44d3fb9f Mon Sep 17 00:00:00 2001
From: Bruno Bornsztein
Date: Thu, 6 Jul 2023 14:13:51 -0500
Subject: [PATCH] handle missing finish_reason (#7296)

In some cases, the OpenAI response is missing the `finish_reason`
attribute. It seems to happen when using Ada or Babbage and
`stream=true`, but I can't always reproduce it. This change just
gracefully handles the missing key.
---
 langchain/llms/openai.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py
index 3b497b2fd9..ef08c2551a 100644
--- a/langchain/llms/openai.py
+++ b/langchain/llms/openai.py
@@ -48,9 +48,9 @@ def update_token_usage(
 def _update_response(response: Dict[str, Any], stream_response: Dict[str, Any]) -> None:
     """Update response from the stream response."""
     response["choices"][0]["text"] += stream_response["choices"][0]["text"]
-    response["choices"][0]["finish_reason"] = stream_response["choices"][0][
-        "finish_reason"
-    ]
+    response["choices"][0]["finish_reason"] = stream_response["choices"][0].get(
+        "finish_reason", None
+    )
     response["choices"][0]["logprobs"] = stream_response["choices"][0]["logprobs"]
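
Illustration (not part of the patch): a minimal sketch of the merge behavior,
using hypothetical chunk dicts rather than real OpenAI payloads, showing how
`.get("finish_reason", None)` keeps the merge working when a streamed chunk
omits the key instead of raising KeyError as the old indexing did.

    # Minimal sketch; the chunk contents below are assumptions for illustration.
    response = {"choices": [{"text": "", "finish_reason": None, "logprobs": None}]}

    # A streamed chunk (e.g. from Ada/Babbage) that omits "finish_reason" entirely.
    chunk = {"choices": [{"text": "Hello", "logprobs": None}]}

    choice = response["choices"][0]
    choice["text"] += chunk["choices"][0]["text"]
    # Old code: chunk["choices"][0]["finish_reason"] would raise KeyError here.
    # New code: .get() falls back to None when the key is absent.
    choice["finish_reason"] = chunk["choices"][0].get("finish_reason", None)
    choice["logprobs"] = chunk["choices"][0]["logprobs"]

    print(choice)  # {'text': 'Hello', 'finish_reason': None, 'logprobs': None}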