Add easy print method to openai callback (#2848)

Found myself constantly copying the snippet that prints out all the callback tracking details, so this adds a simple way to output the full context.
fix_agent_callbacks
Tim Asp 1 year ago committed by GitHub
parent be4fb24b32
commit 70ffe470aa
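
For context, a minimal sketch of the usage this change enables (the model setup is illustrative; import paths assume the LangChain layout at the time of this commit):

```python
from langchain.llms import OpenAI
from langchain.callbacks import get_openai_callback

# Illustrative model setup; any OpenAI LLM works here.
llm = OpenAI(temperature=0)

with get_openai_callback() as cb:
    result = llm("Tell me a joke")
    # Instead of printing each counter by hand, the handler's new
    # __repr__ lets a single print emit the whole usage summary.
    print(cb)
```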

@@ -43,22 +43,18 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Total Tokens: 39\n",
"Prompt Tokens: 4\n",
"Completion Tokens: 35\n",
"Tokens Used: 42\n",
"\tPrompt Tokens: 4\n",
"\tCompletion Tokens: 38\n",
"Successful Requests: 1\n",
"Total Cost (USD): $0.0007800000000000001\n"
"Total Cost (USD): $0.00084\n"
]
}
],
"source": [
"with get_openai_callback() as cb:\n",
" result = llm(\"Tell me a joke\")\n",
" print(f\"Total Tokens: {cb.total_tokens}\")\n",
" print(f\"Prompt Tokens: {cb.prompt_tokens}\")\n",
" print(f\"Completion Tokens: {cb.completion_tokens}\")\n",
" print(f\"Successful Requests: {cb.successful_requests}\")\n",
" print(f\"Total Cost (USD): ${cb.total_cost}\")"
" print(cb)"
]
},
{

@@ -53,6 +53,15 @@ class OpenAICallbackHandler(BaseCallbackHandler):
     successful_requests: int = 0
     total_cost: float = 0.0
 
+    def __repr__(self) -> str:
+        return (
+            f"Tokens Used: {self.total_tokens}\n"
+            f"\tPrompt Tokens: {self.prompt_tokens}\n"
+            f"\tCompletion Tokens: {self.completion_tokens}\n"
+            f"Successful Requests: {self.successful_requests}\n"
+            f"Total Cost (USD): ${self.total_cost}"
+        )
+
     @property
     def always_verbose(self) -> bool:
         """Whether to call verbose callbacks even if verbose is False."""
