diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index ca54ce32e6..47c89929ea 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -947,7 +947,7 @@ class BaseChatOpenAI(BaseChatModel):
                 else:
                     # Cast str(value) in case the message value is not a string
                     # This occurs with function messages
-                    num_tokens += len(encoding.encode(value))
+                    num_tokens += len(encoding.encode(str(value)))
                 if key == "name":
                     num_tokens += tokens_per_name
         # every reply is primed with assistant
diff --git a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
index 4041718368..4e959f0059 100644
--- a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
+++ b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
@@ -677,7 +677,10 @@ def test_get_num_tokens_from_messages() -> None:
         AIMessage(
             "",
             additional_kwargs={
-                "function_call": json.dumps({"arguments": "old", "name": "fun"})
+                "function_call": {
+                    "arguments": json.dumps({"arg1": "arg1"}),
+                    "name": "fun",
+                }
             },
         ),
         AIMessage(
@@ -688,6 +691,6 @@ def test_get_num_tokens_from_messages() -> None:
         ),
         ToolMessage("foobar", tool_call_id="foo"),
     ]
-    expected = 170
+    expected = 176
     actual = llm.get_num_tokens_from_messages(messages)
     assert expected == actual
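
For context, here is a minimal sketch of the failure the first hunk addresses: once a message carrying a `function_call` is converted to the OpenAI dict format (as the updated test now does), the value stored under `"function_call"` is itself a dict rather than a string, and passing it directly to tiktoken's `encode` raises a `TypeError`. Casting to `str` first, as the patch does, keeps the rough token count working. The encoding name and message contents below are illustrative assumptions, not the library's internals.

```python
# Minimal sketch, assuming tiktoken is installed; "cl100k_base" and the
# message contents are illustrative, not taken from the library.
import json
import tiktoken

encoding = tiktoken.get_encoding("cl100k_base")

# A function-call message in OpenAI dict form: the "function_call" value is a dict.
message = {
    "role": "assistant",
    "content": "",
    "function_call": {"arguments": json.dumps({"arg1": "arg1"}), "name": "fun"},
}

num_tokens = 0
for key, value in message.items():
    # encoding.encode(value) would raise TypeError for the dict value;
    # casting to str first allows an approximate token count.
    num_tokens += len(encoding.encode(str(value)))

print(num_tokens)
```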