diff --git a/libs/community/langchain_community/chat_models/deepinfra.py b/libs/community/langchain_community/chat_models/deepinfra.py
index 546171f8762..71b619ad28f 100644
--- a/libs/community/langchain_community/chat_models/deepinfra.py
+++ b/libs/community/langchain_community/chat_models/deepinfra.py
@@ -214,9 +214,10 @@ class ChatDeepInfra(BaseChatModel):
     deepinfra_api_token: Optional[str] = None
     request_timeout: Optional[float] = Field(default=None, alias="timeout")
     temperature: Optional[float] = 1
-    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Run inference with this temperature. Must be in the closed
     interval [0.0, 1.0]."""
+    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Holds any model parameters valid for API call not explicitly specified."""
     top_p: Optional[float] = None
     """Decode using nucleus sampling: consider the smallest set of tokens whose
     probability sum is at least top_p. Must be in the closed interval [0.0, 1.0]."""
diff --git a/libs/community/langchain_community/chat_models/litellm.py b/libs/community/langchain_community/chat_models/litellm.py
index d836a9d02fd..818988e31af 100644
--- a/libs/community/langchain_community/chat_models/litellm.py
+++ b/libs/community/langchain_community/chat_models/litellm.py
@@ -260,9 +260,10 @@ class ChatLiteLLM(BaseChatModel):
     custom_llm_provider: Optional[str] = None
     request_timeout: Optional[Union[float, Tuple[float, float]]] = None
     temperature: Optional[float] = 1
-    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Run inference with this temperature. Must be in the closed
     interval [0.0, 1.0]."""
+    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Holds any model parameters valid for API call not explicitly specified."""
     top_p: Optional[float] = None
     """Decode using nucleus sampling: consider the smallest set of tokens whose
     probability sum is at least top_p. Must be in the closed interval [0.0, 1.0]."""