From 83a53e2126f1969da5c2d3a3376040d6a5492060 Mon Sep 17 00:00:00 2001 From: jacobswe Date: Wed, 26 Jul 2023 13:11:50 -0400 Subject: [PATCH] Bug Fix: AzureChatOpenAI streaming with function calls (#8300) - Description: During streaming, the first chunk may only contain the name of an OpenAI function and not any arguments. In this case, the current code presumes there is a streaming response and tries to append to it, but gets a KeyError. This fixes that case by checking whether the arguments key exists and, if not, creating a new entry instead of appending. - Issue: Related to #6462 Sample Code: ```python llm = AzureChatOpenAI( deployment_name=deployment_name, model_name=model_name, streaming=True ) tools = [PythonREPLTool()] callbacks = [StreamingStdOutCallbackHandler()] agent = initialize_agent( tools=tools, llm=llm, agent=AgentType.OPENAI_FUNCTIONS, callbacks=callbacks ) agent('Run some python code to test your interpreter') ``` Previous Result: ``` File ...langchain/chat_models/openai.py:344, in ChatOpenAI._generate(self, messages, stop, run_manager, **kwargs) 342 function_call = _function_call 343 else: --> 344 function_call["arguments"] += _function_call["arguments"] 345 if run_manager: 346 run_manager.on_llm_new_token(token) KeyError: 'arguments' ``` New Result: ```python {'input': 'Run some python code to test your interpreter', 'output': "The Python code `print('Hello, World!')` has been executed successfully, and the output `Hello, World!` has been printed."} ``` Co-authored-by: jswe --- libs/langchain/langchain/chat_models/openai.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/libs/langchain/langchain/chat_models/openai.py b/libs/langchain/langchain/chat_models/openai.py index 92a6da820b..f1a1efd977 100644 --- a/libs/langchain/langchain/chat_models/openai.py +++ b/libs/langchain/langchain/chat_models/openai.py @@ -340,8 +340,10 @@ class ChatOpenAI(BaseChatModel): if _function_call: if function_call is None: function_call = 
_function_call - else: + elif "arguments" in function_call: function_call["arguments"] += _function_call["arguments"] + else: + function_call["arguments"] = _function_call["arguments"] if run_manager: run_manager.on_llm_new_token(token) message = _convert_dict_to_message( @@ -406,8 +408,10 @@ class ChatOpenAI(BaseChatModel): if _function_call: if function_call is None: function_call = _function_call - else: + elif "arguments" in function_call: function_call["arguments"] += _function_call["arguments"] + else: + function_call["arguments"] = _function_call["arguments"] if run_manager: await run_manager.on_llm_new_token(token) message = _convert_dict_to_message(