deepseek[patch]: ruff fixes and rules (#31901)

* bump ruff deps
* add more thorough ruff rules
* fix violations of said rules
Mason Daugherty
2025-07-07 21:54:44 -04:00
committed by GitHub
parent 38bd1abb8c
commit 231e8d0f43
6 changed files with 126 additions and 41 deletions
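
Most of the mechanical churn below follows two recognizable ruff patterns: exception messages bound to a variable before raising (the flake8-errmsg family; the exact rule codes are an assumption, the commit does not name them) and trailing commas on reflowed multi-line argument lists. A minimal before/after sketch of the errmsg pattern:

    # Before: ruff's errmsg rules flag a string literal passed straight to an
    # exception, since the whole literal gets echoed in the traceback's raise line.
    # raise ValueError("If using default api base, DEEPSEEK_API_KEY must be set.")

    # After: bind the message first, so the traceback shows only the short raise.
    msg = "If using default api base, DEEPSEEK_API_KEY must be set."
    raise ValueError(msg)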


@@ -1,5 +1,7 @@
 """DeepSeek chat models."""
 
+from __future__ import annotations
+
 from collections.abc import Iterator
 from json import JSONDecodeError
 from typing import Any, Literal, Optional, TypeVar, Union
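
The new future import makes every annotation in the module lazily evaluated (PEP 563), so annotations are never executed at definition time and newer annotation syntax parses on older interpreters. A small illustration of that effect:

    from __future__ import annotations

    # "str | None" in an annotation parses even on Python 3.8/3.9, because the
    # future import stores annotations as strings instead of evaluating them.
    def lookup(key: str | None) -> dict[str, int]:
        return {} if key is None else {key: 1}

    print(lookup(None))  # {}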
@@ -163,11 +165,11 @@ class ChatDeepSeek(BaseChatOpenAI):
     model_name: str = Field(alias="model")
     """The name of the model"""
     api_key: Optional[SecretStr] = Field(
-        default_factory=secret_from_env("DEEPSEEK_API_KEY", default=None)
+        default_factory=secret_from_env("DEEPSEEK_API_KEY", default=None),
     )
     """DeepSeek API key"""
     api_base: str = Field(
-        default_factory=from_env("DEEPSEEK_API_BASE", default=DEFAULT_API_BASE)
+        default_factory=from_env("DEEPSEEK_API_BASE", default=DEFAULT_API_BASE),
     )
     """DeepSeek API base URL"""
@@ -188,7 +190,8 @@ class ChatDeepSeek(BaseChatOpenAI):
         if self.api_base == DEFAULT_API_BASE and not (
             self.api_key and self.api_key.get_secret_value()
         ):
-            raise ValueError("If using default api base, DEEPSEEK_API_KEY must be set.")
+            msg = "If using default api base, DEEPSEEK_API_KEY must be set."
+            raise ValueError(msg)
         client_params: dict = {
             k: v
             for k, v in {
@@ -225,13 +228,14 @@ class ChatDeepSeek(BaseChatOpenAI):
         if not isinstance(response, openai.BaseModel):
             return rtn
-        if hasattr(response.choices[0].message, "reasoning_content"):  # type: ignore
-            rtn.generations[0].message.additional_kwargs["reasoning_content"] = (
-                response.choices[0].message.reasoning_content  # type: ignore
-            )
+        choices = getattr(response, "choices", None)
+        if choices and hasattr(choices[0].message, "reasoning_content"):
+            rtn.generations[0].message.additional_kwargs["reasoning_content"] = choices[
+                0
+            ].message.reasoning_content
         # Handle use via OpenRouter
-        elif hasattr(response.choices[0].message, "model_extra"):  # type: ignore
-            model_extra = response.choices[0].message.model_extra  # type: ignore
+        elif choices and hasattr(choices[0].message, "model_extra"):
+            model_extra = choices[0].message.model_extra
             if isinstance(model_extra, dict) and (
                 reasoning := model_extra.get("reasoning")
             ):
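
The rewrite above fetches choices once with getattr and a None default, so a response object that lacks the attribute (or carries an empty list) falls through both branches instead of raising AttributeError. A self-contained sketch of that guard with a hypothetical stand-in response:

    class FakeResponse:  # hypothetical stand-in for an SDK response object
        pass

    resp = FakeResponse()
    choices = getattr(resp, "choices", None)  # None rather than AttributeError
    if choices and hasattr(choices[0].message, "reasoning_content"):
        print(choices[0].message.reasoning_content)
    else:
        print("no choices present; skipping reasoning extraction")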
@@ -278,12 +282,18 @@ class ChatDeepSeek(BaseChatOpenAI):
) -> Iterator[ChatGenerationChunk]:
try:
yield from super()._stream(
messages, stop=stop, run_manager=run_manager, **kwargs
messages,
stop=stop,
run_manager=run_manager,
**kwargs,
)
except JSONDecodeError as e:
raise JSONDecodeError(
msg = (
"DeepSeek API returned an invalid response. "
"Please check the API status and try again.",
"Please check the API status and try again."
)
raise JSONDecodeError(
msg,
e.doc,
e.pos,
) from e
@@ -297,12 +307,18 @@ class ChatDeepSeek(BaseChatOpenAI):
     ) -> ChatResult:
         try:
             return super()._generate(
-                messages, stop=stop, run_manager=run_manager, **kwargs
+                messages,
+                stop=stop,
+                run_manager=run_manager,
+                **kwargs,
             )
         except JSONDecodeError as e:
-            raise JSONDecodeError(
+            msg = (
                 "DeepSeek API returned an invalid response. "
-                "Please check the API status and try again.",
+                "Please check the API status and try again."
+            )
+            raise JSONDecodeError(
+                msg,
                 e.doc,
                 e.pos,
             ) from e
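
Both _stream and _generate now build the message separately and re-raise a fresh JSONDecodeError that preserves the offending document and offset, chained with "from e" so the original parse failure stays visible as __cause__. A standard-library-only sketch of the same chaining:

    import json
    from json import JSONDecodeError

    def parse_or_wrap(raw: str):
        try:
            return json.loads(raw)
        except JSONDecodeError as e:
            msg = "upstream returned an invalid response"
            # e.doc and e.pos carry the bad text and error position forward
            raise JSONDecodeError(msg, e.doc, e.pos) from e

    # parse_or_wrap("not json") raises JSONDecodeError with the original as __cause__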
@@ -312,7 +328,9 @@ class ChatDeepSeek(BaseChatOpenAI):
         schema: Optional[_DictOrPydanticClass] = None,
         *,
         method: Literal[
-            "function_calling", "json_mode", "json_schema"
+            "function_calling",
+            "json_mode",
+            "json_schema",
         ] = "function_calling",
         include_raw: bool = False,
         strict: Optional[bool] = None,
@@ -381,5 +399,9 @@ class ChatDeepSeek(BaseChatOpenAI):
         if method == "json_schema":
             method = "function_calling"
         return super().with_structured_output(
-            schema, method=method, include_raw=include_raw, strict=strict, **kwargs
+            schema,
+            method=method,
+            include_raw=include_raw,
+            strict=strict,
+            **kwargs,
         )
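
For context, the reflowed call is the tail of ChatDeepSeek.with_structured_output, which (per the hunk above) remaps method="json_schema" to "function_calling" before delegating to the OpenAI base implementation. A hedged usage sketch; the model name and schema are illustrative and a DEEPSEEK_API_KEY is assumed to be set:

    from pydantic import BaseModel

    from langchain_deepseek import ChatDeepSeek

    class Joke(BaseModel):
        setup: str
        punchline: str

    llm = ChatDeepSeek(model="deepseek-chat")
    structured = llm.with_structured_output(Joke)
    # structured.invoke("Tell me a joke") would return a Joke instance (network call)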