Mirror of https://github.com/hwchase17/langchain (synced 2024-11-08 07:10:35 +00:00)
Improvement/add finish reason to generation info in chat open ai (#7478)
Description: The ChatOpenAI model does not return finish_reason in generation_info; this change adds it.
Issue: #2702
Dependencies: None
Tag maintainer: @baskaryan
Thank you
Co-authored-by: Bagatur <baskaryan@gmail.com>
Parent: b96ac13f3d
Commit: cc0585af42
@@ -386,7 +386,10 @@ class ChatOpenAI(BaseChatModel):
         generations = []
         for res in response["choices"]:
             message = _convert_dict_to_message(res["message"])
-            gen = ChatGeneration(message=message)
+            gen = ChatGeneration(
+                message=message,
+                generation_info=dict(finish_reason=res.get("finish_reason")),
+            )
             generations.append(gen)
         llm_output = {"token_usage": response["usage"], "model_name": self.model_name}
         return ChatResult(generations=generations, llm_output=llm_output)
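After this change, each ChatGeneration produced by ChatOpenAI carries the finish reason reported by the OpenAI API in its generation_info. A minimal usage sketch, assuming a langchain version that includes this commit and an OPENAI_API_KEY set in the environment (the prompt text and model name are illustrative):

```python
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage

# Assumes OPENAI_API_KEY is available in the environment.
chat = ChatOpenAI(model_name="gpt-3.5-turbo")

# generate() takes a batch of message lists and returns an LLMResult.
result = chat.generate([[HumanMessage(content="Say hello in one word.")]])

# generation_info now includes the finish reason from the API,
# e.g. "stop" or "length".
for generation in result.generations[0]:
    print(generation.generation_info)  # e.g. {"finish_reason": "stop"}
```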