docs: updated ChatLiteLLM model_kwargs description (#29937)

- [x] **PR title**: docs: (community) update ChatLiteLLM

- [x] **PR message**:
    - **Description:** Updated the description of the `model_kwargs` parameter,
      which wrongly carried the `temperature` description (the attribute-docstring
      pattern behind the fix is sketched below this checklist).
    - **Issue:** #29862 
    - **Dependencies:** N/A
    
- [x] **Add tests and docs**: N/A

- [x] **Lint and test**: Run `make format`, `make lint` and `make test`
from the root of the package(s) you've modified. See contribution
guidelines for more: https://python.langchain.com/docs/contributing/
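
The fix works because these class-body string literals are attribute docstrings: documentation tooling (e.g. Sphinx autodoc) attaches a bare string literal to the field defined directly above it, so the `model_kwargs` line had to move below the temperature docstring for each description to end up on the right field. A minimal illustrative sketch of the pattern (not code from this PR):

```python
from typing import Any, Dict, Optional

from pydantic import BaseModel, Field


class ExampleChatModel(BaseModel):
    """Illustrative model: each string literal documents the field directly above it."""

    temperature: Optional[float] = 1
    """Run inference with this temperature. Must be in the closed
    interval [0.0, 1.0]."""
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for API call not explicitly specified."""
```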

---------

Co-authored-by: Chester Curme <chester.curme@gmail.com>
Authored by Saraswathy Kalaiselvan on 2025-02-23 20:27:13 +01:00; committed by GitHub
parent 512eb1b764
commit 5ca4933b9d
2 changed files with 4 additions and 2 deletions


@@ -214,9 +214,10 @@ class ChatDeepInfra(BaseChatModel):
     deepinfra_api_token: Optional[str] = None
     request_timeout: Optional[float] = Field(default=None, alias="timeout")
     temperature: Optional[float] = 1
-    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Run inference with this temperature. Must be in the closed
     interval [0.0, 1.0]."""
+    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Holds any model parameters valid for API call not explicitly specified."""
     top_p: Optional[float] = None
     """Decode using nucleus sampling: consider the smallest set of tokens whose
     probability sum is at least top_p. Must be in the closed interval [0.0, 1.0]."""

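A hedged usage sketch for the corrected `ChatDeepInfra` fields (assumes a DeepInfra API token is configured in the environment and that the extra key is one the DeepInfra API accepts; it is illustrative, not taken from this PR):

```python
from langchain_community.chat_models import ChatDeepInfra

# temperature and top_p are explicit fields (closed interval [0.0, 1.0]);
# any other provider parameter rides in model_kwargs.
chat = ChatDeepInfra(
    temperature=0.7,
    top_p=0.9,
    model_kwargs={"repetition_penalty": 1.1},  # assumed provider-side parameter
)
print(chat.invoke("Say hello in one short sentence."))
```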

@@ -260,9 +260,10 @@ class ChatLiteLLM(BaseChatModel):
     custom_llm_provider: Optional[str] = None
     request_timeout: Optional[Union[float, Tuple[float, float]]] = None
     temperature: Optional[float] = 1
-    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Run inference with this temperature. Must be in the closed
     interval [0.0, 1.0]."""
+    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    """Holds any model parameters valid for API call not explicitly specified."""
     top_p: Optional[float] = None
     """Decode using nucleus sampling: consider the smallest set of tokens whose
     probability sum is at least top_p. Must be in the closed interval [0.0, 1.0]."""
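
And the same pattern for `ChatLiteLLM` (the model name and extra key are illustrative; which extras are valid depends on the provider LiteLLM routes the call to):

```python
from langchain_community.chat_models import ChatLiteLLM

# Explicit fields cover common sampling knobs; everything else the backing
# provider understands is passed through model_kwargs.
chat = ChatLiteLLM(
    model="gpt-3.5-turbo",                  # assumed model identifier
    temperature=0.2,
    model_kwargs={"user": "docs-example"},  # illustrative extra parameter
)
print(chat.invoke("In one sentence, what is model_kwargs for?"))
```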