forked from Archives/langchain
Add incremental messages token count (#3890)
This commit is contained in:
parent 52e4fba897
commit 484707ad29
@@ -70,6 +70,7 @@ class AutoGPTPrompt(BaseChatPromptTemplate, BaseModel):
             if used_tokens + message_tokens > self.send_token_limit - 1000:
                 break
             historical_messages = [message] + historical_messages
+            used_tokens += message_tokens
         input_message = HumanMessage(content=kwargs["user_input"])
         messages: List[BaseMessage] = [base_prompt, time_prompt, memory_message]
         messages += historical_messages
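The added line increments used_tokens as each historical message is kept, so the budget check against send_token_limit reflects the messages already included instead of comparing every message to the same stale total. For context, a minimal self-contained sketch of that truncation loop follows; the names token_counter, send_token_limit, and previous_messages mirror the AutoGPTPrompt attributes seen in the diff, while the whitespace tokenizer and the select_history helper are illustrative assumptions, not the library's exact code.

from typing import Callable, List

def select_history(
    previous_messages: List[str],
    used_tokens: int,
    send_token_limit: int,
    token_counter: Callable[[str], int],
) -> List[str]:
    """Walk recent messages newest-first and keep as many as fit the budget.

    Without the per-message increment, used_tokens stays constant inside the
    loop, so every candidate message is checked against the same total and the
    assembled prompt can overflow send_token_limit.
    """
    historical_messages: List[str] = []
    for message in previous_messages[-10:][::-1]:
        message_tokens = token_counter(message)
        # Reserve roughly 1000 tokens of headroom, as in the diff above.
        if used_tokens + message_tokens > send_token_limit - 1000:
            break
        historical_messages = [message] + historical_messages
        used_tokens += message_tokens  # the line this commit adds
    return historical_messages

if __name__ == "__main__":
    # Crude whitespace token counter, used here only for illustration.
    messages = [f"message {i} " + "word " * 200 for i in range(10)]
    kept = select_history(
        messages,
        used_tokens=500,
        send_token_limit=2000,
        token_counter=lambda text: len(text.split()),
    )
    print(f"kept {len(kept)} of {len(messages)} recent messages")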