From 4cd5cf2e95afaa90c07f806137070b389a4c80e5 Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Tue, 28 Mar 2023 07:59:40 -0700
Subject: [PATCH] notebook for tokens (#2086)

---
 .../models/llms/examples/token_usage_tracking.ipynb | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/docs/modules/models/llms/examples/token_usage_tracking.ipynb b/docs/modules/models/llms/examples/token_usage_tracking.ipynb
index e8449e9e..b0caaced 100644
--- a/docs/modules/models/llms/examples/token_usage_tracking.ipynb
+++ b/docs/modules/models/llms/examples/token_usage_tracking.ipynb
@@ -25,7 +25,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 3,
    "id": "d1c55cc9",
    "metadata": {},
    "outputs": [],
@@ -35,7 +35,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 5,
    "id": "31667d54",
    "metadata": {},
    "outputs": [
@@ -43,14 +43,18 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "42\n"
+      "Total Tokens: 42\n",
+      "Prompt Tokens: 4\n",
+      "Completion Tokens: 38\n"
      ]
     }
    ],
    "source": [
     "with get_openai_callback() as cb:\n",
     "    result = llm(\"Tell me a joke\")\n",
-     "    print(cb.total_tokens)"
+     "    print(f\"Total Tokens: {cb.total_tokens}\")\n",
+     "    print(f\"Prompt Tokens: {cb.prompt_tokens}\")\n",
+     "    print(f\"Completion Tokens: {cb.completion_tokens}\")"
    ]
   },
   {
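
For reference, here is a minimal sketch of how the patched cell reads as a standalone script. Only the `get_openai_callback()` block and the three printed fields come from the diff above; the imports, the `llm = OpenAI(...)` setup, and the API-key handling are assumptions based on the surrounding notebook.

```python
# Sketch of the updated token-tracking cell as a self-contained script.
# Assumes OPENAI_API_KEY is set in the environment; the model arguments
# are placeholders, not part of this patch.
from langchain.llms import OpenAI
from langchain.callbacks import get_openai_callback

llm = OpenAI(model_name="text-davinci-002", n=2, best_of=2)  # assumed setup from earlier cells

with get_openai_callback() as cb:
    result = llm("Tell me a joke")
    # The patch replaces the single total with a per-field breakdown:
    print(f"Total Tokens: {cb.total_tokens}")
    print(f"Prompt Tokens: {cb.prompt_tokens}")
    print(f"Completion Tokens: {cb.completion_tokens}")
```

The callback accumulates usage for every OpenAI call made inside the `with` block, so the same pattern also reports combined totals when the block wraps a chain or agent run rather than a single completion.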