Mirror of https://github.com/hwchase17/langchain, synced 2024-11-10 01:10:59 +00:00
fix(community): allow support for disabling max_tokens args (#21534)
This PR fixes an issue where it was not possible to leave `max_tokens` unset for the LiteLLM provider and rely on the provider's own (effectively unlimited) token limit. This matters in agent environments, where token usage can grow far beyond an initially configured cap and cause unexpected behavior.
This commit is contained in:
parent 2a0d6788f7
commit c6f700b7cb
@@ -191,7 +191,7 @@ class ChatLiteLLM(BaseChatModel):
     n: int = 1
     """Number of chat completions to generate for each prompt. Note that the API may
     not return the full n completions if duplicates are generated."""
-    max_tokens: int = 256
+    max_tokens: Optional[int] = None

     max_retries: int = 6

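With this change, `max_tokens` defaults to `None`, so no token cap is sent to LiteLLM unless the caller sets one explicitly. A minimal usage sketch follows; the model name and credentials setup are assumptions for illustration and are not part of this PR:

```python
# Minimal sketch, assuming the langchain_community import path and an
# OpenAI-backed model name; provider credentials must be configured separately.
from langchain_community.chat_models import ChatLiteLLM

# After this change, max_tokens defaults to None, so no cap is passed through
# to LiteLLM and the provider's own limit applies.
llm = ChatLiteLLM(model="gpt-3.5-turbo")

# An explicit cap can still be set when a bounded response is desired.
capped_llm = ChatLiteLLM(model="gpt-3.5-turbo", max_tokens=256)

response = llm.invoke("Summarize what max_tokens controls in one sentence.")
print(response.content)
```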