fix(community): allow support for disabling max_tokens args (#21534)
This PR fixes an issue where the LiteLLM provider could not use the unlimited/infinite token limit offered by the underlying provider. This is a problem in agent environments, where token usage can grow well beyond the initially configured value, causing unexpected behavior.
@@ -191,7 +191,7 @@ class ChatLiteLLM(BaseChatModel):
     n: int = 1
     """Number of chat completions to generate for each prompt. Note that the API may
     not return the full n completions if duplicates are generated."""
-    max_tokens: int = 256
+    max_tokens: Optional[int] = None
 
     max_retries: int = 6
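For context, a minimal sketch of how the changed field is meant to be used: leaving max_tokens as None means ChatLiteLLM no longer caps completions at 256 tokens and defers to the provider's own limit, while passing an explicit integer still enforces a hard cap. The model name and prompt below are illustrative placeholders, not part of the PR.

from langchain_community.chat_models import ChatLiteLLM

# With the new default (max_tokens=None), no client-side cap is applied;
# the provider's own maximum governs the completion length.
llm_uncapped = ChatLiteLLM(model="gpt-3.5-turbo", max_tokens=None)

# Passing an explicit integer still behaves as before and caps the output.
llm_capped = ChatLiteLLM(model="gpt-3.5-turbo", max_tokens=256)

print(llm_uncapped.invoke("Summarize the plot of Hamlet in detail.").content)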