Harrison/logprobs (#1279)

Co-authored-by: Prateek Shah <97124740+prateekspanning@users.noreply.github.com>
docker-utility-pexpect
Harrison Chase 1 year ago committed by GitHub
parent 42167a1e24
commit a8e88e1874
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -251,7 +251,9 @@ class BaseOpenAI(BaseLLM, BaseModel):
             prompt=_prompts, **params
         ):
             self.callback_manager.on_llm_new_token(
-                stream_resp["choices"][0]["text"], verbose=self.verbose
+                stream_resp["choices"][0]["text"],
+                verbose=self.verbose,
+                logprobs=stream_resp["choices"][0]["logprobs"],
             )
             _update_response(response, stream_resp)
         choices.extend(response["choices"])
@@ -285,11 +287,15 @@ class BaseOpenAI(BaseLLM, BaseModel):
         ):
             if self.callback_manager.is_async:
                 await self.callback_manager.on_llm_new_token(
-                    stream_resp["choices"][0]["text"], verbose=self.verbose
+                    stream_resp["choices"][0]["text"],
+                    verbose=self.verbose,
+                    logprobs=stream_resp["choices"][0]["logprobs"],
                 )
             else:
                 self.callback_manager.on_llm_new_token(
-                    stream_resp["choices"][0]["text"], verbose=self.verbose
+                    stream_resp["choices"][0]["text"],
+                    verbose=self.verbose,
+                    logprobs=stream_resp["choices"][0]["logprobs"],
                 )
             _update_response(response, stream_resp)
         choices.extend(response["choices"])

Loading…
Cancel
Save