deepseek[patch]: ruff fixes and rules (#31901)

* bump ruff deps
* add more thorough ruff rules
* fix said rules
Mason Daugherty 2025-07-07 21:54:44 -04:00 committed by GitHub
parent 38bd1abb8c
commit 231e8d0f43
6 changed files with 126 additions and 41 deletions
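Most of the mechanical changes below come from two of the newly enabled rule families: COM (flake8-commas) adds trailing commas and splits multi-argument calls one argument per line, and EM (flake8-errmsg) forbids passing a string literal directly to an exception constructor. A minimal sketch of the EM pattern, using a hypothetical helper name:

    from __future__ import annotations

    def require_api_key(api_key: str | None) -> None:
        """Raise if no API key is configured (illustrative only)."""
        if not api_key:
            # EM101: binding the message to a variable first keeps the
            # traceback from repeating the full literal on the raise line.
            msg = "If using default api base, DEEPSEEK_API_KEY must be set."
            raise ValueError(msg)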

View File

@@ -1,5 +1,7 @@
 """DeepSeek chat models."""
 
+from __future__ import annotations
+
 from collections.abc import Iterator
 from json import JSONDecodeError
 from typing import Any, Literal, Optional, TypeVar, Union
@@ -163,11 +165,11 @@ class ChatDeepSeek(BaseChatOpenAI):
     model_name: str = Field(alias="model")
     """The name of the model"""
     api_key: Optional[SecretStr] = Field(
-        default_factory=secret_from_env("DEEPSEEK_API_KEY", default=None)
+        default_factory=secret_from_env("DEEPSEEK_API_KEY", default=None),
     )
     """DeepSeek API key"""
     api_base: str = Field(
-        default_factory=from_env("DEEPSEEK_API_BASE", default=DEFAULT_API_BASE)
+        default_factory=from_env("DEEPSEEK_API_BASE", default=DEFAULT_API_BASE),
     )
     """DeepSeek API base URL"""
@@ -188,7 +190,8 @@ class ChatDeepSeek(BaseChatOpenAI):
         if self.api_base == DEFAULT_API_BASE and not (
             self.api_key and self.api_key.get_secret_value()
         ):
-            raise ValueError("If using default api base, DEEPSEEK_API_KEY must be set.")
+            msg = "If using default api base, DEEPSEEK_API_KEY must be set."
+            raise ValueError(msg)
         client_params: dict = {
             k: v
             for k, v in {
@@ -225,13 +228,14 @@ class ChatDeepSeek(BaseChatOpenAI):
         if not isinstance(response, openai.BaseModel):
             return rtn
 
-        if hasattr(response.choices[0].message, "reasoning_content"):  # type: ignore
-            rtn.generations[0].message.additional_kwargs["reasoning_content"] = (
-                response.choices[0].message.reasoning_content  # type: ignore
-            )
+        choices = getattr(response, "choices", None)
+        if choices and hasattr(choices[0].message, "reasoning_content"):
+            rtn.generations[0].message.additional_kwargs["reasoning_content"] = choices[
+                0
+            ].message.reasoning_content
         # Handle use via OpenRouter
-        elif hasattr(response.choices[0].message, "model_extra"):  # type: ignore
-            model_extra = response.choices[0].message.model_extra  # type: ignore
+        elif choices and hasattr(choices[0].message, "model_extra"):
+            model_extra = choices[0].message.model_extra
             if isinstance(model_extra, dict) and (
                 reasoning := model_extra.get("reasoning")
             ):
@@ -278,12 +282,18 @@ class ChatDeepSeek(BaseChatOpenAI):
     ) -> Iterator[ChatGenerationChunk]:
         try:
             yield from super()._stream(
-                messages, stop=stop, run_manager=run_manager, **kwargs
+                messages,
+                stop=stop,
+                run_manager=run_manager,
+                **kwargs,
             )
         except JSONDecodeError as e:
-            raise JSONDecodeError(
+            msg = (
                 "DeepSeek API returned an invalid response. "
-                "Please check the API status and try again.",
+                "Please check the API status and try again."
+            )
+            raise JSONDecodeError(
+                msg,
                 e.doc,
                 e.pos,
             ) from e
@@ -297,12 +307,18 @@ class ChatDeepSeek(BaseChatOpenAI):
     ) -> ChatResult:
         try:
             return super()._generate(
-                messages, stop=stop, run_manager=run_manager, **kwargs
+                messages,
+                stop=stop,
+                run_manager=run_manager,
+                **kwargs,
             )
         except JSONDecodeError as e:
-            raise JSONDecodeError(
+            msg = (
                 "DeepSeek API returned an invalid response. "
-                "Please check the API status and try again.",
+                "Please check the API status and try again."
+            )
+            raise JSONDecodeError(
+                msg,
                 e.doc,
                 e.pos,
             ) from e
@@ -312,7 +328,9 @@ class ChatDeepSeek(BaseChatOpenAI):
         schema: Optional[_DictOrPydanticClass] = None,
         *,
         method: Literal[
-            "function_calling", "json_mode", "json_schema"
+            "function_calling",
+            "json_mode",
+            "json_schema",
         ] = "function_calling",
         include_raw: bool = False,
         strict: Optional[bool] = None,
@@ -381,5 +399,9 @@ class ChatDeepSeek(BaseChatOpenAI):
         if method == "json_schema":
             method = "function_calling"
         return super().with_structured_output(
-            schema, method=method, include_raw=include_raw, strict=strict, **kwargs
+            schema,
+            method=method,
+            include_raw=include_raw,
+            strict=strict,
+            **kwargs,
         )
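The reasoning_content handling above is easiest to see from the caller's side. A minimal usage sketch, assuming DEEPSEEK_API_KEY is set in the environment and the deepseek-reasoner model is available:

    from langchain_deepseek import ChatDeepSeek

    llm = ChatDeepSeek(model="deepseek-reasoner")  # api_key defaults to DEEPSEEK_API_KEY
    response = llm.invoke("What is 3^3?")
    answer = response.content
    # Populated by _create_chat_result when the API returns a reasoning trace:
    reasoning = response.additional_kwargs.get("reasoning_content")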

View File

@@ -49,8 +49,52 @@ disallow_untyped_defs = "True"
 target-version = "py39"
 
 [tool.ruff.lint]
-select = ["E", "F", "I", "T201", "UP", "S"]
-ignore = [ "UP007", ]
+select = [
+    "A",      # flake8-builtins
+    "ASYNC",  # flake8-async
+    "C4",     # flake8-comprehensions
+    "COM",    # flake8-commas
+    "D",      # pydocstyle
+    "DOC",    # pydoclint
+    "E",      # pycodestyle error
+    "EM",     # flake8-errmsg
+    "F",      # pyflakes
+    "FA",     # flake8-future-annotations
+    "FBT",    # flake8-boolean-trap
+    "FLY",    # flake8-flynt
+    "I",      # isort
+    "ICN",    # flake8-import-conventions
+    "INT",    # flake8-gettext
+    "ISC",    # flake8-implicit-str-concat
+    "PGH",    # pygrep-hooks
+    "PIE",    # flake8-pie
+    "PERF",   # Perflint
+    "PYI",    # flake8-pyi
+    "Q",      # flake8-quotes
+    "RET",    # flake8-return
+    "RSE",    # flake8-raise
+    "RUF",    # ruff
+    "S",      # flake8-bandit
+    "SLF",    # flake8-self
+    "SLOT",   # flake8-slots
+    "SIM",    # flake8-simplify
+    "T10",    # flake8-debugger
+    "T20",    # flake8-print
+    "TID",    # flake8-tidy-imports
+    "UP",     # pyupgrade
+    "W",      # pycodestyle warning
+    "YTT",    # flake8-2020
+]
+ignore = [
+    "D100",   # Missing docstring in public module
+    "D101",   # Missing docstring in public class
+    "D102",   # Missing docstring in public method
+    "D103",   # Missing docstring in public function
+    "D104",   # Missing docstring in public package
+    "D105",   # Missing docstring in magic method
+    "D107",   # Missing docstring in __init__
+    "UP007",  # pyupgrade: non-pep604-annotation-union
+]
 
 [tool.coverage.run]
 omit = ["tests/*"]
@@ -66,4 +110,5 @@ asyncio_mode = "auto"
 "tests/**/*.py" = [
     "S101",    # Tests need assertions
     "S311",    # Standard pseudo-random generators are not suitable for cryptographic purposes
+    "SLF001",  # Private member access
 ]
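For a sense of what the expanded selection enforces, two illustrative snippets (not code from this repo) that the new rules would flag, with their fixes:

    # RET504 (flake8-return): unnecessary assignment just before return.
    def cube(n: int) -> int:
        result = n**3  # flagged: assign-then-return
        return result

    def cube_fixed(n: int) -> int:
        return n**3

    # SIM108 (flake8-simplify): an if/else block that is really a ternary.
    def sign(n: int) -> str:
        if n < 0:  # flagged: collapsible if/else assignment
            label = "negative"
        else:
            label = "non-negative"
        return label

    def sign_fixed(n: int) -> str:
        return "negative" if n < 0 else "non-negative"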

View File

@@ -8,7 +8,7 @@ if __name__ == "__main__":
     for file in files:
         try:
             SourceFileLoader("x", file).load_module()
-        except Exception:
+        except Exception:  # noqa: PERF203
             has_failure = True
             print(file)  # noqa: T201
             traceback.print_exc()
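The added noqa suppresses PERF203 (Perflint's try-except-in-loop), which flags exception handlers inside loop bodies on performance grounds; here the per-file try/except is the whole point of the script, so the rule is silenced rather than the loop restructured. A sketch of the shape the rule objects to:

    # PERF203: try/except nested in a loop body.
    values = ["1", "x", "3"]
    total = 0
    for value in values:
        try:  # flagged: handler set up on every iteration
            total += int(value)
        except ValueError:
            continue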

View File

@@ -1,5 +1,7 @@
 """Test ChatDeepSeek chat model."""
 
+from __future__ import annotations
+
 from typing import Optional
 
 import pytest
@@ -31,7 +33,9 @@ class TestChatDeepSeek(ChatModelIntegrationTests):
 
     @pytest.mark.xfail(reason="Not yet supported.")
     def test_tool_message_histories_list_content(
-        self, model: BaseChatModel, my_adder_tool: BaseTool
+        self,
+        model: BaseChatModel,
+        my_adder_tool: BaseTool,
     ) -> None:
         super().test_tool_message_histories_list_content(model, my_adder_tool)
 
@@ -43,7 +47,7 @@ def test_reasoning_content() -> None:
     response = chat_model.invoke("What is 3^3?")
     assert response.content
     assert response.additional_kwargs["reasoning_content"]
-    raise ValueError()
+    raise ValueError
 
 
 @pytest.mark.xfail(reason="Takes > 30s to run.")

View File

@@ -4,4 +4,3 @@ import pytest
 @pytest.mark.compile
 def test_placeholder() -> None:
     """Used for compiling integration tests without running any real tests."""
-    pass

View File

@@ -1,5 +1,7 @@
 """Test chat model integration."""
 
+from __future__ import annotations
+
 from typing import Any, Literal, Union
 from unittest.mock import MagicMock
 
@@ -19,7 +21,7 @@ class MockOpenAIResponse(BaseModel):
     def model_dump(
         self,
         *,
-        mode: Union[Literal["json", "python"], str] = "python",
+        mode: Union[Literal["json", "python"], str] = "python",  # noqa: PYI051
         include: Any = None,
         exclude: Any = None,
         by_alias: bool = False,
@@ -100,7 +102,8 @@ class TestChatDeepSeekCustomUnit:
         mock_message.reasoning_content = "This is the reasoning content"
         mock_message.role = "assistant"
         mock_response = MockOpenAIResponse(
-            choices=[MagicMock(message=mock_message)], error=None
+            choices=[MagicMock(message=mock_message)],
+            error=None,
         )
 
         result = chat_model._create_chat_result(mock_response)
@@ -140,16 +143,19 @@ class TestChatDeepSeekCustomUnit:
                     "delta": {
                         "content": "Main content",
                         "reasoning_content": "Streaming reasoning content",
-                    }
-                }
-            ]
+                    },
+                },
+            ],
         }
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
         )
 
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert (
             chunk_result.message.additional_kwargs.get("reasoning_content")
             == "Streaming reasoning content"
@@ -164,16 +170,19 @@ class TestChatDeepSeekCustomUnit:
                     "delta": {
                         "content": "Main content",
                        "reasoning": "Streaming reasoning",
-                    }
-                }
-            ]
+                    },
+                },
+            ],
         }
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
         )
 
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert (
             chunk_result.message.additional_kwargs.get("reasoning_content")
             == "Streaming reasoning"
@@ -185,10 +194,13 @@ class TestChatDeepSeekCustomUnit:
         chunk: dict[str, Any] = {"choices": [{"delta": {"content": "Main content"}}]}
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
         )
 
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert chunk_result.message.additional_kwargs.get("reasoning_content") is None
 
     def test_convert_chunk_with_empty_delta(self) -> None:
@@ -197,8 +209,11 @@ class TestChatDeepSeekCustomUnit:
         chunk: dict[str, Any] = {"choices": [{"delta": {}}]}
         chunk_result = chat_model._convert_chunk_to_generation_chunk(
-            chunk, AIMessageChunk, None
+            chunk,
+            AIMessageChunk,
+            None,
         )
 
         if chunk_result is None:
-            raise AssertionError("Expected chunk_result not to be None")
+            msg = "Expected chunk_result not to be None"
+            raise AssertionError(msg)
         assert chunk_result.message.additional_kwargs.get("reasoning_content") is None
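A note on the repeated "if chunk_result is None: raise AssertionError(msg)" pattern: _convert_chunk_to_generation_chunk returns an Optional result, so the explicit None check both fails the test early and narrows the type for static checkers before .message is accessed. A standalone sketch of the same narrowing:

    from __future__ import annotations

    def first_word(text: str | None) -> str:
        if text is None:
            msg = "Expected text not to be None"
            raise AssertionError(msg)
        # Type checkers now treat text as str on this path.
        return text.split()[0]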