From 5ca4933b9d64ef4dd8dd8985e08567cdadde3339 Mon Sep 17 00:00:00 2001
From: Saraswathy Kalaiselvan <42293506+SaraInCode@users.noreply.github.com>
Date: Sun, 23 Feb 2025 20:27:13 +0100
Subject: [PATCH] docs: updated ChatLiteLLM model_kwargs description (#29937)

- [x] **PR title**: docs: (community) update ChatLiteLLM
- [x] **PR message**:
    - **Description:** updated the description of the `model_kwargs` parameter, which wrongly carried the description for `temperature`.
    - **Issue:** #29862
    - **Dependencies:** N/A
- [x] **Add tests and docs**: N/A
- [x] **Lint and test**: Run `make format`, `make lint` and `make test` from the root of the package(s) you've modified. See contribution guidelines for more: https://python.langchain.com/docs/contributing/

---------

Co-authored-by: Chester Curme
---
 libs/community/langchain_community/chat_models/deepinfra.py | 3 ++-
 libs/community/langchain_community/chat_models/litellm.py   | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/libs/community/langchain_community/chat_models/deepinfra.py b/libs/community/langchain_community/chat_models/deepinfra.py
index 546171f8762..71b619ad28f 100644
--- a/libs/community/langchain_community/chat_models/deepinfra.py
+++ b/libs/community/langchain_community/chat_models/deepinfra.py
@@ -214,9 +214,10 @@ class ChatDeepInfra(BaseChatModel):
     deepinfra_api_token: Optional[str] = None
     request_timeout: Optional[float] = Field(default=None, alias="timeout")
     temperature: Optional[float] = 1
-    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Run inference with this temperature. Must be in the closed
        interval [0.0, 1.0]."""
+    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Holds any model parameters valid for API call not explicitly specified."""
     top_p: Optional[float] = None
     """Decode using nucleus sampling: consider the smallest set of tokens whose
        probability sum is at least top_p. Must be in the closed interval [0.0, 1.0]."""
diff --git a/libs/community/langchain_community/chat_models/litellm.py b/libs/community/langchain_community/chat_models/litellm.py
index d836a9d02fd..818988e31af 100644
--- a/libs/community/langchain_community/chat_models/litellm.py
+++ b/libs/community/langchain_community/chat_models/litellm.py
@@ -260,9 +260,10 @@ class ChatLiteLLM(BaseChatModel):
     custom_llm_provider: Optional[str] = None
     request_timeout: Optional[Union[float, Tuple[float, float]]] = None
    temperature: Optional[float] = 1
-    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Run inference with this temperature. Must be in the closed
        interval [0.0, 1.0]."""
+    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Holds any model parameters valid for API call not explicitly specified."""
     top_p: Optional[float] = None
     """Decode using nucleus sampling: consider the smallest set of tokens whose
        probability sum is at least top_p. Must be in the closed interval [0.0, 1.0]."""
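
For context on the behavior the corrected docstring describes (not part of the patch), a minimal usage sketch: `model_kwargs` forwards parameters that have no dedicated field on the class (unlike `temperature` or `top_p`) through to the underlying API call. The model name and the `user` kwarg below are illustrative assumptions, not values taken from this PR.

```python
from langchain_community.chat_models import ChatLiteLLM

llm = ChatLiteLLM(
    model="gpt-3.5-turbo",  # assumed model name, routed through LiteLLM
    temperature=0.7,  # explicit field; closed interval [0.0, 1.0]
    model_kwargs={"user": "doc-example"},  # extra API param with no dedicated field
)
print(llm.invoke("Say hello in one word.").content)
```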