- Description: Small change to fix broken Azure streaming. A more complete
  migration will probably still be necessary once the new API behavior is
  finalized.
- Issue: Implements the fix proposed by @rock-you in #6462
- Dependencies: N/A

There don't seem to be any tests specifically for this behavior, and I had
trouble adding them. This is just a small temporary fix that accommodates
the new API changes OpenAI is rolling out without breaking any other code.
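For context, the guarded path handles stream chunks that arrive with an
empty `choices` list, which the old code indexed unconditionally and so
crashed on with an IndexError. Below is a minimal standalone sketch of that
failure mode and the guard; the chunk dictionaries are illustrative
assumptions about the streamed payload shape, not captured Azure output.

    # Hypothetical chunk shapes, for illustration only.
    empty_chunk = {"choices": []}  # a streamed chunk carrying no delta
    normal_chunk = {"choices": [{"delta": {"content": "Hello"}}]}

    inner_completion = ""
    for stream_resp in (empty_chunk, normal_chunk):
        # Old code: stream_resp["choices"][0] -> IndexError on empty_chunk.
        # New code: skip chunks that carry no choices.
        if len(stream_resp["choices"]) > 0:
            token = stream_resp["choices"][0]["delta"].get("content") or ""
            inner_completion += token

    print(inner_completion)  # -> "Hello"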

---------

Co-authored-by: Jacob Swe <jswe@polencapital.com>
Co-authored-by: Bagatur <baskaryan@gmail.com>

@@ -330,17 +330,20 @@ class ChatOpenAI(BaseChatModel):
             for stream_resp in self.completion_with_retry(
                 messages=message_dicts, **params
             ):
-                role = stream_resp["choices"][0]["delta"].get("role", role)
-                token = stream_resp["choices"][0]["delta"].get("content") or ""
-                inner_completion += token
-                _function_call = stream_resp["choices"][0]["delta"].get("function_call")
-                if _function_call:
-                    if function_call is None:
-                        function_call = _function_call
-                    else:
-                        function_call["arguments"] += _function_call["arguments"]
-                if run_manager:
-                    run_manager.on_llm_new_token(token)
+                if len(stream_resp["choices"]) > 0:
+                    role = stream_resp["choices"][0]["delta"].get("role", role)
+                    token = stream_resp["choices"][0]["delta"].get("content") or ""
+                    inner_completion += token
+                    _function_call = stream_resp["choices"][0]["delta"].get(
+                        "function_call"
+                    )
+                    if _function_call:
+                        if function_call is None:
+                            function_call = _function_call
+                        else:
+                            function_call["arguments"] += _function_call["arguments"]
+                    if run_manager:
+                        run_manager.on_llm_new_token(token)
             message = _convert_dict_to_message(
                 {
                     "content": inner_completion,
@@ -393,17 +396,20 @@ class ChatOpenAI(BaseChatModel):
             async for stream_resp in await acompletion_with_retry(
                 self, messages=message_dicts, **params
             ):
-                role = stream_resp["choices"][0]["delta"].get("role", role)
-                token = stream_resp["choices"][0]["delta"].get("content", "")
-                inner_completion += token or ""
-                _function_call = stream_resp["choices"][0]["delta"].get("function_call")
-                if _function_call:
-                    if function_call is None:
-                        function_call = _function_call
-                    else:
-                        function_call["arguments"] += _function_call["arguments"]
-                if run_manager:
-                    await run_manager.on_llm_new_token(token)
+                if len(stream_resp["choices"]) > 0:
+                    role = stream_resp["choices"][0]["delta"].get("role", role)
+                    token = stream_resp["choices"][0]["delta"].get("content", "")
+                    inner_completion += token or ""
+                    _function_call = stream_resp["choices"][0]["delta"].get(
+                        "function_call"
+                    )
+                    if _function_call:
+                        if function_call is None:
+                            function_call = _function_call
+                        else:
+                            function_call["arguments"] += _function_call["arguments"]
+                    if run_manager:
+                        await run_manager.on_llm_new_token(token)
             message = _convert_dict_to_message(
                 {
                     "content": inner_completion,