From c9f93f5f7416f719dea663192727ce43151118fe Mon Sep 17 00:00:00 2001 From: tmyjoe Date: Fri, 7 Apr 2023 23:27:03 +0900 Subject: [PATCH] fix: token counting for chat openai. (#2543) I noticed that the value of get_num_tokens_from_messages in `ChatOpenAI` is always one less than the response from OpenAI's API. Upon checking the official documentation, I found that it had been updated, so I made the necessary corrections. Now I get the same value as OpenAI's API. https://github.com/openai/openai-cookbook/commit/d972e7482ed71d36e12f5b058380a61d516bc6d0#diff-2d4485035b3a3469802dbad11d7b4f834df0ea0e2790f418976b303bc82c1874L474 --- langchain/chat_models/openai.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/langchain/chat_models/openai.py b/langchain/chat_models/openai.py index 667934d8..6d510d4b 100644 --- a/langchain/chat_models/openai.py +++ b/langchain/chat_models/openai.py @@ -400,5 +400,5 @@ class ChatOpenAI(BaseChatModel): if key == "name": num_tokens += tokens_per_name # every reply is primed with assistant - num_tokens += 2 + num_tokens += 3 return num_tokens