deepseek[patch]: ruff fixes and rules (#31901)

* bump ruff deps
* add more thorough ruff rules
* fix said rules
Mason Daugherty 2025-07-07 21:54:44 -04:00 committed by GitHub
parent 38bd1abb8c
commit 231e8d0f43
6 changed files with 126 additions and 41 deletions

File: libs/partners/deepseek/langchain_deepseek/chat_models.py

@@ -1,5 +1,7 @@
 """DeepSeek chat models."""
 
+from __future__ import annotations
+
 from collections.abc import Iterator
 from json import JSONDecodeError
 from typing import Any, Literal, Optional, TypeVar, Union

@@ -163,11 +165,11 @@ class ChatDeepSeek(BaseChatOpenAI):
     model_name: str = Field(alias="model")
     """The name of the model"""
     api_key: Optional[SecretStr] = Field(
-        default_factory=secret_from_env("DEEPSEEK_API_KEY", default=None)
+        default_factory=secret_from_env("DEEPSEEK_API_KEY", default=None),
     )
     """DeepSeek API key"""
     api_base: str = Field(
-        default_factory=from_env("DEEPSEEK_API_BASE", default=DEFAULT_API_BASE)
+        default_factory=from_env("DEEPSEEK_API_BASE", default=DEFAULT_API_BASE),
     )
     """DeepSeek API base URL"""
@@ -188,7 +190,8 @@ class ChatDeepSeek(BaseChatOpenAI):
         if self.api_base == DEFAULT_API_BASE and not (
             self.api_key and self.api_key.get_secret_value()
         ):
-            raise ValueError("If using default api base, DEEPSEEK_API_KEY must be set.")
+            msg = "If using default api base, DEEPSEEK_API_KEY must be set."
+            raise ValueError(msg)
         client_params: dict = {
             k: v
             for k, v in {
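The msg-variable pattern here is what the newly enabled flake8-errmsg (EM) rules ask for: a string literal passed straight to an exception constructor is flagged as EM101, since tracebacks would otherwise repeat the long literal. A small before/after sketch (function names are illustrative):

from typing import Optional


# Flagged by EM101 (raw string literal in exception):
def check_key_bad(key: Optional[str]) -> None:
    if not key:
        raise ValueError("If using default api base, DEEPSEEK_API_KEY must be set.")


# What EM asks for: bind the message first, then raise. The traceback
# then shows `raise ValueError(msg)` instead of the whole literal.
def check_key_good(key: Optional[str]) -> None:
    if not key:
        msg = "If using default api base, DEEPSEEK_API_KEY must be set."
        raise ValueError(msg)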
@@ -225,13 +228,14 @@ class ChatDeepSeek(BaseChatOpenAI):
         if not isinstance(response, openai.BaseModel):
             return rtn
-        if hasattr(response.choices[0].message, "reasoning_content"):  # type: ignore
-            rtn.generations[0].message.additional_kwargs["reasoning_content"] = (
-                response.choices[0].message.reasoning_content  # type: ignore
-            )
+        choices = getattr(response, "choices", None)
+        if choices and hasattr(choices[0].message, "reasoning_content"):
+            rtn.generations[0].message.additional_kwargs["reasoning_content"] = choices[
+                0
+            ].message.reasoning_content
         # Handle use via OpenRouter
-        elif hasattr(response.choices[0].message, "model_extra"):  # type: ignore
-            model_extra = response.choices[0].message.model_extra  # type: ignore
+        elif choices and hasattr(choices[0].message, "model_extra"):
+            model_extra = choices[0].message.model_extra
             if isinstance(model_extra, dict) and (
                 reasoning := model_extra.get("reasoning")
             ):
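The getattr guard replaces the old `# type: ignore` attribute access: if the response has no choices, both branches are skipped instead of raising. The user-facing effect, sketched as hypothetical usage (assumes DEEPSEEK_API_KEY is set and the deepseek-reasoner model): reasoning tokens land in additional_kwargs whether they arrive as DeepSeek's reasoning_content field or via OpenRouter's model_extra["reasoning"].

from langchain_deepseek import ChatDeepSeek

llm = ChatDeepSeek(model="deepseek-reasoner")
response = llm.invoke("What is 3^3?")
print(response.content)                                     # final answer
print(response.additional_kwargs.get("reasoning_content"))  # reasoning trace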
@@ -278,12 +282,18 @@ class ChatDeepSeek(BaseChatOpenAI):
     ) -> Iterator[ChatGenerationChunk]:
         try:
             yield from super()._stream(
-                messages, stop=stop, run_manager=run_manager, **kwargs
+                messages,
+                stop=stop,
+                run_manager=run_manager,
+                **kwargs,
             )
         except JSONDecodeError as e:
-            raise JSONDecodeError(
+            msg = (
                 "DeepSeek API returned an invalid response. "
-                "Please check the API status and try again.",
+                "Please check the API status and try again."
+            )
+            raise JSONDecodeError(
+                msg,
                 e.doc,
                 e.pos,
             ) from e
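JSONDecodeError's constructor takes (msg, doc, pos), which is why the re-raise threads e.doc and e.pos through: the parse location survives the message rewrite. A standalone sketch of the pattern:

from json import JSONDecodeError

try:
    raise JSONDecodeError("unexpected token", "{bad", 1)
except JSONDecodeError as e:
    msg = (
        "DeepSeek API returned an invalid response. "
        "Please check the API status and try again."
    )
    wrapped = JSONDecodeError(msg, e.doc, e.pos)
    # The original document and offset are preserved in the rewrap:
    assert (wrapped.doc, wrapped.pos) == ("{bad", 1)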
@@ -297,12 +307,18 @@ class ChatDeepSeek(BaseChatOpenAI):
     ) -> ChatResult:
         try:
             return super()._generate(
-                messages, stop=stop, run_manager=run_manager, **kwargs
+                messages,
+                stop=stop,
+                run_manager=run_manager,
+                **kwargs,
             )
         except JSONDecodeError as e:
-            raise JSONDecodeError(
+            msg = (
                 "DeepSeek API returned an invalid response. "
-                "Please check the API status and try again.",
+                "Please check the API status and try again."
+            )
+            raise JSONDecodeError(
+                msg,
                 e.doc,
                 e.pos,
             ) from e
@@ -312,7 +328,9 @@ class ChatDeepSeek(BaseChatOpenAI):
         schema: Optional[_DictOrPydanticClass] = None,
         *,
         method: Literal[
-            "function_calling", "json_mode", "json_schema"
+            "function_calling",
+            "json_mode",
+            "json_schema",
         ] = "function_calling",
         include_raw: bool = False,
         strict: Optional[bool] = None,
@@ -381,5 +399,9 @@ class ChatDeepSeek(BaseChatOpenAI):
         if method == "json_schema":
             method = "function_calling"
         return super().with_structured_output(
-            schema, method=method, include_raw=include_raw, strict=strict, **kwargs
+            schema,
+            method=method,
+            include_raw=include_raw,
+            strict=strict,
+            **kwargs,
         )
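Behavior is unchanged by this hunk: ChatDeepSeek still downgrades method="json_schema" to function calling, as the two context lines above show. Hypothetical usage of that fallback:

from langchain_deepseek import ChatDeepSeek
from pydantic import BaseModel


class Answer(BaseModel):
    value: int


llm = ChatDeepSeek(model="deepseek-chat")
# Requesting json_schema silently runs as method="function_calling".
structured = llm.with_structured_output(Answer, method="json_schema")
result = structured.invoke("What is 3^3? Reply with just the number.")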

File: libs/partners/deepseek/pyproject.toml

@@ -49,8 +49,52 @@ disallow_untyped_defs = "True"
 target-version = "py39"
 
 [tool.ruff.lint]
-select = ["E", "F", "I", "T201", "UP", "S"]
-ignore = [ "UP007", ]
+select = [
+    "A",      # flake8-builtins
+    "ASYNC",  # flake8-async
+    "C4",     # flake8-comprehensions
+    "COM",    # flake8-commas
+    "D",      # pydocstyle
+    "DOC",    # pydoclint
+    "E",      # pycodestyle error
+    "EM",     # flake8-errmsg
+    "F",      # pyflakes
+    "FA",     # flake8-future-annotations
+    "FBT",    # flake8-boolean-trap
+    "FLY",    # flake8-flynt
+    "I",      # isort
+    "ICN",    # flake8-import-conventions
+    "INT",    # flake8-gettext
+    "ISC",    # flake8-implicit-str-concat
+    "PGH",    # pygrep-hooks
+    "PIE",    # flake8-pie
+    "PERF",   # perflint
+    "PYI",    # flake8-pyi
+    "Q",      # flake8-quotes
+    "RET",    # flake8-return
+    "RSE",    # flake8-raise
+    "RUF",    # Ruff-specific rules
+    "S",      # flake8-bandit
+    "SLF",    # flake8-self
+    "SLOT",   # flake8-slots
+    "SIM",    # flake8-simplify
+    "T10",    # flake8-debugger
+    "T20",    # flake8-print
+    "TID",    # flake8-tidy-imports
+    "UP",     # pyupgrade
+    "W",      # pycodestyle warning
+    "YTT",    # flake8-2020
+]
+ignore = [
+    "D100",   # Missing docstring in public module
+    "D101",   # Missing docstring in public class
+    "D102",   # Missing docstring in public method
+    "D103",   # Missing docstring in public function
+    "D104",   # Missing docstring in public package
+    "D105",   # Missing docstring in magic method
+    "D107",   # Missing docstring in __init__
+    "UP007",  # pyupgrade: non-pep604-annotation-union
+]
 
 [tool.coverage.run]
 omit = ["tests/*"]
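For a sense of what the newly selected rule families catch, here is a hypothetical snippet in which each marked line trips one of the rules enabled above:

def report(items, verbose=True):  # FBT002: boolean default positional argument
    result = dict()  # C408 (C4 family): unnecessary dict() call, rewrite as {}
    for item in items:
        try:  # PERF203: try/except inside a loop body
            result[item] = int(item)
        except ValueError:
            raise ValueError("bad item")  # EM101: raw string literal in exception
    print(result)  # T201: print found
    return result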
@@ -66,4 +110,5 @@ asyncio_mode = "auto"
 "tests/**/*.py" = [
     "S101",    # Tests need assertions
     "S311",    # Standard pseudo-random generators are not suitable for cryptographic purposes
+    "SLF001",  # Private member access
 ]

File: libs/partners/deepseek/scripts/check_imports.py

@@ -8,7 +8,7 @@ if __name__ == "__main__":
     for file in files:
         try:
             SourceFileLoader("x", file).load_module()
-        except Exception:
+        except Exception:  # noqa: PERF203
             has_failure = True
             print(file)  # noqa: T201
             traceback.print_exc()
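PERF203 penalizes try/except inside a loop because handler setup repeats every iteration; here each file must be isolated so one bad import does not abort the sweep, hence a suppression rather than a restructure. The shape the rule objects to, in miniature:

# Flagged: the except block lives inside the loop body.
for name in ["json", "definitely_not_a_module"]:
    try:
        __import__(name)
    except ImportError:  # PERF203 would fire here without the noqa above
        print(f"failed to import {name}")  # noqa: T201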

File: libs/partners/deepseek/tests/integration_tests/test_chat_models.py

@@ -1,5 +1,7 @@
 """Test ChatDeepSeek chat model."""
 
+from __future__ import annotations
+
 from typing import Optional
 
 import pytest
@@ -31,7 +33,9 @@ class TestChatDeepSeek(ChatModelIntegrationTests):
     @pytest.mark.xfail(reason="Not yet supported.")
     def test_tool_message_histories_list_content(
-        self, model: BaseChatModel, my_adder_tool: BaseTool
+        self,
+        model: BaseChatModel,
+        my_adder_tool: BaseTool,
     ) -> None:
         super().test_tool_message_histories_list_content(model, my_adder_tool)
@@ -43,7 +47,7 @@ def test_reasoning_content() -> None:
     response = chat_model.invoke("What is 3^3?")
     assert response.content
     assert response.additional_kwargs["reasoning_content"]
-    raise ValueError()
+    raise ValueError
 
 
 @pytest.mark.xfail(reason="Takes > 30s to run.")
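Dropping the empty parentheses is RSE102 (flake8-raise: unnecessary parentheses on raised exception); the two forms raise the identical exception:

# Equivalent at runtime; ruff prefers the bare form when there are no args.
try:
    raise ValueError
except ValueError:
    pass

try:
    raise ValueError()  # RSE102: unnecessary parentheses
except ValueError:
    pass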

File: libs/partners/deepseek/tests/integration_tests/test_compile.py

@@ -4,4 +4,3 @@ import pytest
 @pytest.mark.compile
 def test_placeholder() -> None:
     """Used for compiling integration tests without running any real tests."""
-    pass
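The removed pass is the PIE790 fix (flake8-pie: unnecessary placeholder): a docstring is already a complete function body, so the extra statement is redundant.

def placeholder() -> None:
    """A docstring alone is a valid body; adding `pass` changes nothing."""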

File: libs/partners/deepseek/tests/unit_tests/test_chat_models.py

@@ -1,5 +1,7 @@
 """Test chat model integration."""
 
+from __future__ import annotations
+
 from typing import Any, Literal, Union
 from unittest.mock import MagicMock
@@ -19,7 +21,7 @@ class MockOpenAIResponse(BaseModel):
     def model_dump(
         self,
         *,
-        mode: Union[Literal["json", "python"], str] = "python",
+        mode: Union[Literal["json", "python"], str] = "python",  # noqa: PYI051
         include: Any = None,
         exclude: Any = None,
         by_alias: bool = False,
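PYI051 flags Literal members that are redundant inside a union with their supertype: Union[Literal["json", "python"], str] type-checks identically to plain str. The noqa keeps the annotation because it mirrors pydantic's BaseModel.model_dump signature. In isolation:

from typing import Literal, Union

# PYI051: the Literal members are already strs, so this union collapses
# to `str`; it is kept here only for documentation value.
Mode = Union[Literal["json", "python"], str]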
@@ -100,7 +102,8 @@ class TestChatDeepSeekCustomUnit:
         mock_message.reasoning_content = "This is the reasoning content"
         mock_message.role = "assistant"
         mock_response = MockOpenAIResponse(
-            choices=[MagicMock(message=mock_message)], error=None
+            choices=[MagicMock(message=mock_message)],
+            error=None,
         )
 
         result = chat_model._create_chat_result(mock_response)
@@ -140,16 +143,19 @@ class TestChatDeepSeekCustomUnit:
                     "delta": {
                         "content": "Main content",
                         "reasoning_content": "Streaming reasoning content",
-                    }
-                }
-            ]
+                    },
+                },
+            ],
         }
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
         )
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert (
             chunk_result.message.additional_kwargs.get("reasoning_content")
             == "Streaming reasoning content"
@@ -164,16 +170,19 @@ class TestChatDeepSeekCustomUnit:
                     "delta": {
                         "content": "Main content",
                         "reasoning": "Streaming reasoning",
-                    }
-                }
-            ]
+                    },
+                },
+            ],
         }
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
        )
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert (
             chunk_result.message.additional_kwargs.get("reasoning_content")
             == "Streaming reasoning"
@@ -185,10 +194,13 @@ class TestChatDeepSeekCustomUnit:
         chunk: dict[str, Any] = {"choices": [{"delta": {"content": "Main content"}}]}
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
         )
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert chunk_result.message.additional_kwargs.get("reasoning_content") is None
 
     def test_convert_chunk_with_empty_delta(self) -> None:
@@ -197,8 +209,11 @@ class TestChatDeepSeekCustomUnit:
         chunk: dict[str, Any] = {"choices": [{"delta": {}}]}
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
         )
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert chunk_result.message.additional_kwargs.get("reasoning_content") is None
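The delta handling these unit tests pin down surfaces to users through streaming. A hypothetical end-to-end check of the same behavior (requires DEEPSEEK_API_KEY to be set):

from langchain_deepseek import ChatDeepSeek

llm = ChatDeepSeek(model="deepseek-reasoner")
for chunk in llm.stream("What is 3^3?"):
    # Reasoning deltas arrive in additional_kwargs; answer text in .content.
    reasoning = chunk.additional_kwargs.get("reasoning_content")
    if reasoning:
        print(reasoning, end="", flush=True)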