Mirror of https://github.com/hwchase17/langchain, synced 2024-11-16 06:13:16 +00:00
community: Add logprobs in gen output (#14826)
Now that it's supported again for OAI chat models. Shame this wouldn't include it in the `.invoke()` output though (it's not included in the message itself). Would need a follow-up for that to be the case.
parent c316731d0f
commit 2d91d2b978
@@ -454,9 +454,12 @@ class ChatOpenAI(BaseChatModel):
         response = response.dict()
         for res in response["choices"]:
             message = convert_dict_to_message(res["message"])
+            generation_info = dict(finish_reason=res.get("finish_reason"))
+            if "logprobs" in res:
+                generation_info["logprobs"] = res["logprobs"]
             gen = ChatGeneration(
                 message=message,
-                generation_info=dict(finish_reason=res.get("finish_reason")),
+                generation_info=generation_info,
             )
             generations.append(gen)
         token_usage = response.get("usage", {})
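Not part of the commit, but a minimal sketch of how the new field can be read after this change. It assumes the OpenAI `logprobs` flag is forwarded through `model_kwargs` (the exact plumbing may differ) and uses `generate()` rather than `.invoke()`, since, as the description notes, the value is not attached to the message itself.

from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage

# Assumption: the OpenAI `logprobs` flag is passed through via model_kwargs.
chat = ChatOpenAI(model="gpt-3.5-turbo", model_kwargs={"logprobs": True})

result = chat.generate([[HumanMessage(content="Say hi")]])
gen = result.generations[0][0]

# With this commit, token log probabilities (when returned by the API)
# land in generation_info alongside finish_reason.
info = gen.generation_info or {}
print(info.get("finish_reason"))
print(info.get("logprobs"))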