From 8045870a0ff12a8eb884433bc6ee28b82a5522be Mon Sep 17 00:00:00 2001 From: Shuqian Date: Thu, 6 Jul 2023 01:00:35 +0800 Subject: [PATCH] fix: prevent adding an empty string to the result queue in AsyncIteratorCallbackHandler (#7180) - Description: Modify the code for AsyncIteratorCallbackHandler.on_llm_new_token to ensure that it does not add an empty string to the result queue. - Tag maintainer: @agola11 When using AsyncIteratorCallbackHandler with OpenAIFunctionsAgent, if the LLM responds with a function_call instead of a direct answer, the AsyncIteratorCallbackHandler.on_llm_new_token would be called with an empty string. see also: langchain.chat_models.openai.ChatOpenAI._generate An alternative solution is to modify the langchain.chat_models.openai.ChatOpenAI._generate and do not call the run_manager.on_llm_new_token when the token is an empty string. I am not sure which solution is better. @hwchase17 --------- Co-authored-by: Harrison Chase --- langchain/callbacks/streaming_aiter.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/langchain/callbacks/streaming_aiter.py b/langchain/callbacks/streaming_aiter.py index cc66d9a406..6e791a64b5 100644 --- a/langchain/callbacks/streaming_aiter.py +++ b/langchain/callbacks/streaming_aiter.py @@ -31,7 +31,8 @@ class AsyncIteratorCallbackHandler(AsyncCallbackHandler): self.done.clear() async def on_llm_new_token(self, token: str, **kwargs: Any) -> None: - self.queue.put_nowait(token) + if token is not None and token != "": + self.queue.put_nowait(token) async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None: self.done.set()