forked from Archives/langchain
fix bug with openai token count (#1806)
parent b1c4480d7c
commit f6d24d5740
@@ -229,7 +229,8 @@ class ChatOpenAI(BaseChatModel, BaseModel):
         overall_token_usage: dict = {}
         for output in llm_outputs:
             if output is None:
-                raise ValueError("Should always be something for OpenAI.")
+                # Happens in streaming
+                continue
             token_usage = output["token_usage"]
             for k, v in token_usage.items():
                 if k in overall_token_usage:
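For context, the hunk above sits in ChatOpenAI's logic for combining per-call LLM outputs into a single token-usage summary. Below is a minimal standalone sketch of that aggregation as it reads after the fix. Only the lines visible in the hunk are confirmed by the diff; the function name, the summation branch, and the return value are illustrative assumptions.

from typing import List, Optional

def combine_llm_outputs(llm_outputs: List[Optional[dict]]) -> dict:
    """Illustrative sketch (not the verbatim library method) of summing token usage."""
    overall_token_usage: dict = {}
    for output in llm_outputs:
        if output is None:
            # Happens in streaming: streamed chat calls carry no llm_output,
            # so the fix skips them instead of raising ValueError.
            continue
        token_usage = output["token_usage"]
        for k, v in token_usage.items():
            # Assumed aggregation: add counts for keys already seen, otherwise copy them.
            if k in overall_token_usage:
                overall_token_usage[k] += v
            else:
                overall_token_usage[k] = v
    return {"token_usage": overall_token_usage}

For example, combine_llm_outputs([{"token_usage": {"total_tokens": 10}}, None]) returns {"token_usage": {"total_tokens": 10}}, skipping the streamed (None) entry rather than raising.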