feat(text-splitters): ruff fixes and rules (#32502)

Mason Daugherty 2025-08-11 13:28:22 -04:00 committed by GitHub
parent 27b6b53f20
commit 457ce9c4b0
12 changed files with 161 additions and 69 deletions

View File

@@ -3,11 +3,10 @@ from __future__ import annotations
import copy
import logging
from abc import ABC, abstractmethod
from collections.abc import Collection, Iterable, Sequence
from collections.abc import Set as AbstractSet
from dataclasses import dataclass
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Callable,
Literal,
@@ -17,6 +16,11 @@ from typing import (
)
from langchain_core.documents import BaseDocumentTransformer, Document
from typing_extensions import Self
if TYPE_CHECKING:
from collections.abc import Collection, Iterable, Sequence
from collections.abc import Set as AbstractSet
logger = logging.getLogger(__name__)
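Note: the import move above is the standard pattern of keeping typing-only imports behind TYPE_CHECKING so they carry no runtime cost. A minimal sketch of the pattern (names below are illustrative, not from this commit):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # resolved only by the type checker; never imported at runtime
    from collections.abc import Sequence


def first(items: Sequence[str]) -> str:
    # the annotation stays a string thanks to `from __future__ import annotations`
    return items[0]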
@@ -163,33 +167,33 @@ class TextSplitter(BaseDocumentTransformer, ABC):
def _huggingface_tokenizer_length(text: str) -> int:
return len(tokenizer.tokenize(text))
except ImportError:
except ImportError as err:
msg = (
"Could not import transformers python package. "
"Please install it with `pip install transformers`."
)
raise ValueError(msg)
raise ValueError(msg) from err
return cls(length_function=_huggingface_tokenizer_length, **kwargs)
@classmethod
def from_tiktoken_encoder(
cls: type[TS],
cls,
encoding_name: str = "gpt2",
model_name: Optional[str] = None,
allowed_special: Union[Literal["all"], AbstractSet[str]] = set(),
disallowed_special: Union[Literal["all"], Collection[str]] = "all",
**kwargs: Any,
) -> TS:
) -> Self:
"""Text splitter that uses tiktoken encoder to count length."""
try:
import tiktoken
except ImportError:
except ImportError as err:
msg = (
"Could not import tiktoken python package. "
"This is needed in order to calculate max_tokens_for_prompt. "
"Please install it with `pip install tiktoken`."
)
raise ImportError(msg)
raise ImportError(msg) from err
if model_name is not None:
enc = tiktoken.encoding_for_model(model_name)
@@ -238,13 +242,13 @@ class TokenTextSplitter(TextSplitter):
super().__init__(**kwargs)
try:
import tiktoken
except ImportError:
except ImportError as err:
msg = (
"Could not import tiktoken python package. "
"This is needed in order to for TokenTextSplitter. "
"Please install it with `pip install tiktoken`."
)
raise ImportError(msg)
raise ImportError(msg) from err
if model_name is not None:
enc = tiktoken.encoding_for_model(model_name)
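The signature change above, dropping the TS TypeVar in favor of typing_extensions.Self, can be sketched as follows (class and method names here are illustrative only):

from typing import Any

from typing_extensions import Self


class Splitter:
    def __init__(self, chunk_size: int = 100, **kwargs: Any) -> None:
        self.chunk_size = chunk_size

    @classmethod
    def from_defaults(cls, **kwargs: Any) -> Self:
        # `Self` gives subclasses their own type back without declaring a TypeVar
        return cls(**kwargs)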

View File

@@ -39,8 +39,8 @@ class CharacterTextSplitter(TextSplitter):
)
# 4. Decide merge separator:
# - if keep_separator or lookaround → dont re-insert
# - else re-insert literal separator
# - if keep_separator or lookaround -> don't re-insert
# - else -> re-insert literal separator
merge_sep = ""
if not (self._keep_separator or is_lookaround):
merge_sep = self._separator
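A small standalone sketch of the decision the comment describes (not the library code itself): the separator is re-inserted only when it was actually stripped during splitting.

def choose_merge_separator(separator: str, keep_separator: bool, is_lookaround: bool) -> str:
    # keep_separator already leaves the separator attached to the chunks, and a
    # lookaround pattern is zero-width, so neither case re-inserts anything
    if keep_separator or is_lookaround:
        return ""
    return separator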

View File

@@ -3,9 +3,9 @@ from __future__ import annotations
import copy
import pathlib
import re
from collections.abc import Iterable, Sequence
from io import StringIO
from typing import (
TYPE_CHECKING,
Any,
Callable,
Literal,
@@ -21,6 +21,11 @@ from langchain_core.documents import BaseDocumentTransformer, Document
from langchain_text_splitters.character import RecursiveCharacterTextSplitter
if TYPE_CHECKING:
from collections.abc import Iterable, Sequence
from bs4.element import PageElement
class ElementType(TypedDict):
"""Element type as typed dict."""
@@ -159,7 +164,11 @@ class HTMLHeaderTextSplitter:
requests.RequestException: If the HTTP request fails.
"""
kwargs.setdefault("timeout", timeout)
response = requests.get(url, **kwargs)
response = requests.get(
url,
timeout=kwargs.get("timeout", timeout),
**{k: v for k, v in kwargs.items() if k != "timeout"},
)
response.raise_for_status()
return self.split_text(response.text)
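The rewritten call makes the timeout explicit at the call site, which is what the bandit rule for requests calls without a visible timeout (S113) looks for. A hedged, self-contained sketch of the same pattern (function name and default are illustrative):

from typing import Any

import requests


def fetch_text(url: str, timeout: int = 10, **kwargs: Any) -> str:
    # pass timeout as an explicit keyword so the linter can see it, and strip any
    # caller-supplied copy from kwargs to avoid passing it twice
    response = requests.get(
        url,
        timeout=kwargs.get("timeout", timeout),
        **{k: v for k, v in kwargs.items() if k != "timeout"},
    )
    response.raise_for_status()
    return response.text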
@@ -229,9 +238,9 @@ class HTMLHeaderTextSplitter:
children = list(node.children)
from bs4.element import Tag
for child in reversed(children):
if isinstance(child, Tag):
stack.append(child)
stack.extend(
child for child in reversed(children) if isinstance(child, Tag)
)
tag = getattr(node, "name", None)
if not tag:
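The loop-to-extend rewrite above is the usual fix suggested by ruff's perflint rules (e.g. PERF401, manual list build). A toy sketch, independent of BeautifulSoup:

items = ["a", 1, "b", 2.0, "c"]

stack: list[str] = []
# one extend() call replaces the append-inside-if loop; reversed() keeps the order
stack.extend(item for item in reversed(items) if isinstance(item, str))
# stack == ["c", "b", "a"]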
@@ -382,7 +391,6 @@ class HTMLSectionSplitter:
"""
try:
from bs4 import BeautifulSoup
from bs4.element import PageElement
except ImportError as e:
msg = "Unable to import BeautifulSoup/PageElement, \
please install with `pip install \
@@ -396,7 +404,7 @@ class HTMLSectionSplitter:
headers = soup.find_all(["body", *headers]) # type: ignore[assignment]
for i, header in enumerate(headers):
header_element = cast(PageElement, header)
header_element = cast("PageElement", header)
if i == 0:
current_header = "#TITLE#"
current_header_tag = "h1"
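Quoting the type in cast() works because the runtime import of PageElement was removed; only the type checker needs the name. A minimal sketch of the same idea (illustrative names):

from typing import TYPE_CHECKING, Any, cast

if TYPE_CHECKING:
    from bs4.element import PageElement  # type-checking only; bs4 not needed at runtime


def as_page_element(obj: Any) -> "PageElement":
    # the quoted type keeps cast() working even though PageElement is not imported here
    return cast("PageElement", obj)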
@@ -477,7 +485,7 @@ class HTMLSectionSplitter:
return [
Document(
cast(str, section["content"]),
cast("str", section["content"]),
metadata={
self.headers_to_split_on[str(section["tag_name"])]: section[
"header"
@@ -587,19 +595,19 @@ class HTMLSemanticPreservingSplitter(BaseDocumentTransformer):
denylist_tags: Optional[list[str]] = None,
preserve_parent_metadata: bool = False,
keep_separator: Union[bool, Literal["start", "end"]] = True,
):
) -> None:
"""Initialize splitter."""
try:
from bs4 import BeautifulSoup, Tag
self._BeautifulSoup = BeautifulSoup
self._Tag = Tag
except ImportError:
except ImportError as err:
msg = (
"Could not import BeautifulSoup. "
"Please install it with 'pip install bs4'."
)
raise ImportError(msg)
raise ImportError(msg) from err
self._headers_to_split_on = sorted(headers_to_split_on)
self._max_chunk_size = max_chunk_size
@@ -647,11 +655,11 @@ class HTMLSemanticPreservingSplitter(BaseDocumentTransformer):
nltk.download("stopwords")
self._stopwords = set(nltk.corpus.stopwords.words(self._stopword_lang))
except ImportError:
except ImportError as err:
msg = (
"Could not import nltk. Please install it with 'pip install nltk'."
)
raise ImportError(msg)
raise ImportError(msg) from err
def split_text(self, text: str) -> list[Document]:
"""Splits the provided HTML text into smaller chunks based on the configuration.

View File

@@ -23,7 +23,7 @@ class RecursiveJsonSplitter:
def __init__(
self, max_chunk_size: int = 2000, min_chunk_size: Optional[int] = None
):
) -> None:
"""Initialize the chunk size configuration for text processing.
This constructor sets up the maximum and minimum chunk sizes, ensuring that

View File

@@ -21,12 +21,12 @@ class KonlpyTextSplitter(TextSplitter):
self._separator = separator
try:
import konlpy
except ImportError:
except ImportError as err:
msg = """
Konlpy is not installed, please install it with
`pip install konlpy`
"""
raise ImportError(msg)
raise ImportError(msg) from err
self.kkma = konlpy.tag.Kkma()
def split_text(self, text: str) -> list[str]:

View File

@@ -26,7 +26,7 @@ class MarkdownHeaderTextSplitter:
headers_to_split_on: list[tuple[str, str]],
return_each_line: bool = False, # noqa: FBT001,FBT002
strip_headers: bool = True, # noqa: FBT001,FBT002
):
) -> None:
"""Create a new MarkdownHeaderTextSplitter.
Args:
@@ -281,7 +281,7 @@ class ExperimentalMarkdownSyntaxTextSplitter:
headers_to_split_on: Union[list[tuple[str, str]], None] = None,
return_each_line: bool = False, # noqa: FBT001,FBT002
strip_headers: bool = True, # noqa: FBT001,FBT002
):
) -> None:
"""Initialize the text splitter with header splitting and formatting options.
This constructor sets up the required configuration for splitting text into

View File

@@ -31,9 +31,9 @@ class NLTKTextSplitter(TextSplitter):
self._tokenizer = nltk.tokenize._get_punkt_tokenizer(self._language)
else:
self._tokenizer = nltk.tokenize.sent_tokenize
except ImportError:
except ImportError as err:
msg = "NLTK is not installed, please install it with `pip install nltk`."
raise ImportError(msg)
raise ImportError(msg) from err
def split_text(self, text: str) -> list[str]:
"""Split incoming text and return chunks."""

View File

@@ -20,13 +20,13 @@ class SentenceTransformersTokenTextSplitter(TextSplitter):
try:
from sentence_transformers import SentenceTransformer
except ImportError:
except ImportError as err:
msg = (
"Could not import sentence_transformers python package. "
"This is needed in order to for SentenceTransformersTokenTextSplitter. "
"Please install it with `pip install sentence-transformers`."
)
raise ImportError(msg)
raise ImportError(msg) from err
self.model_name = model_name
self._model = SentenceTransformer(self.model_name)

View File

@@ -45,11 +45,13 @@ def _make_spacy_pipeline_for_splitting(
) -> Any: # avoid importing spacy
try:
import spacy
except ImportError:
except ImportError as err:
msg = "Spacy is not installed, please install it with `pip install spacy`."
raise ImportError(msg)
raise ImportError(msg) from err
if pipeline == "sentencizer":
sentencizer: Any = spacy.lang.en.English()
from spacy.lang.en import English
sentencizer: Any = English()
sentencizer.add_pipe("sentencizer")
else:
sentencizer = spacy.load(pipeline, exclude=["ner", "tagger"])
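Importing English explicitly avoids relying on spacy.lang.en having been imported as a side effect of `import spacy`. A short sketch of the corrected construction (assuming spaCy is installed):

from spacy.lang.en import English

nlp = English()              # blank English pipeline, no model download required
nlp.add_pipe("sentencizer")  # rule-based sentence boundary detection
doc = nlp("First sentence. Second sentence.")
sentences = [sent.text for sent in doc.sents]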

View File

@@ -6,7 +6,10 @@ build-backend = "pdm.backend"
authors = []
license = { text = "MIT" }
requires-python = ">=3.9"
dependencies = ["langchain-core<1.0.0,>=0.3.72"]
dependencies = [
"langchain-core<1.0.0,>=0.3.72",
"pip>=25.2",
]
name = "langchain-text-splitters"
version = "0.3.9"
description = "LangChain text splitting utilities"
@@ -18,7 +21,7 @@ readme = "README.md"
repository = "https://github.com/langchain-ai/langchain"
[dependency-groups]
lint = ["ruff<0.13,>=0.12.2", "langchain-core"]
lint = ["ruff<0.13,>=0.12.8", "langchain-core"]
typing = [
"mypy<2.0,>=1.15",
"lxml-stubs<1.0.0,>=0.5.1",
@@ -42,6 +45,7 @@ test_integration = [
"nltk<4.0.0,>=3.9.1",
"transformers<5.0.0,>=4.51.3",
"sentence-transformers>=3.0.1",
"tiktoken<1.0.0,>=0.8.0",
]
[tool.uv.sources]
@@ -61,10 +65,72 @@ ignore_missing_imports = "True"
target-version = "py39"
[tool.ruff.lint]
select = ["E", "F", "FBT", "I", "UP", "PGH003", "T201", "D"]
ignore = ["D100",]
select = [
"A", # flake8-builtins
"B", # flake8-bugbear
"ASYNC", # flake8-async
"C4", # flake8-comprehensions
"COM", # flake8-commas
"D", # pydocstyle
"E", # pycodestyle error
"EM", # flake8-errmsg
"F", # pyflakes
"FA", # flake8-future-annotations
"FBT", # flake8-boolean-trap
"FLY", # flake8-flynt
"I", # isort
"ICN", # flake8-import-conventions
"INT", # flake8-gettext
"ISC", # isort-comprehensions
"PGH", # pygrep-hooks
"PIE", # flake8-pie
"PERF", # flake8-perf
"PYI", # flake8-pyi
"Q", # flake8-quotes
"RET", # flake8-return
"RSE", # flake8-rst-docstrings
"RUF", # ruff
"S", # flake8-bandit
"SLF", # flake8-self
"SLOT", # flake8-slots
"SIM", # flake8-simplify
"T10", # flake8-debugger
"T20", # flake8-print
"TID", # flake8-tidy-imports
"UP", # pyupgrade
"W", # pycodestyle warning
"YTT", # flake8-2020
]
ignore = [
"D100", # pydocstyle: Missing docstring in public module
"D101", # pydocstyle: Missing docstring in public class
"D102", # pydocstyle: Missing docstring in public method
"D103", # pydocstyle: Missing docstring in public function
"D104", # pydocstyle: Missing docstring in public package
"D105", # pydocstyle: Missing docstring in magic method
"D107", # pydocstyle: Missing docstring in __init__
"D203", # Messes with the formatter
"D213", # pydocstyle: Multi-line docstring summary should start at the second line (incompatible with D212)
"D407", # pydocstyle: Missing-dashed-underline-after-section
"COM812", # Messes with the formatter
"ISC001", # Messes with the formatter
"PERF203", # Rarely useful
"S112", # Rarely useful
"RUF012", # Doesn't play well with Pydantic
"SLF001", # Private member access
"UP007", # pyupgrade: non-pep604-annotation-union
"UP045", # pyupgrade: non-pep604-annotation-optional
]
unfixable = ["B028"] # People should intentionally tune the stacklevel
pyupgrade.keep-runtime-typing = true
[tool.ruff.lint.extend-per-file-ignores]
"tests/**/*.py" = [
"S101", # Tests need assertions
"S311", # Standard pseudo-random generators are not suitable for cryptographic purposes
]
[tool.coverage.run]
omit = ["tests/*"]
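Many of the code changes in this commit are mechanical consequences of the newly selected rules, notably B904 (re-raise with `from` inside an `except` block); the message-in-a-variable pattern required by the EM rules was already in place. A hedged before/after sketch:

# Before: `raise ImportError(msg)` inside an except block loses the original cause.
# After: B904 wants the cause chained explicitly with `from err`.
try:
    import tiktoken  # noqa: F401
except ImportError as err:
    msg = (
        "Could not import tiktoken python package. "
        "Please install it with `pip install tiktoken`."
    )
    raise ImportError(msg) from err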

View File

@@ -1,7 +1,6 @@
import random
import string
import sys
import traceback
import uuid
from importlib.machinery import SourceFileLoader
if __name__ == "__main__":
@@ -9,9 +8,7 @@ if __name__ == "__main__":
has_failure = False
for file in files:
try:
module_name = "".join(
random.choice(string.ascii_letters) for _ in range(20)
)
module_name = f"test_module_{uuid.uuid4().hex[:20]}"
SourceFileLoader(module_name, file).load_module()
except Exception:
has_failure = True
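Replacing random.choice over ASCII letters with a UUID-derived name sidesteps the S311 warning about pseudo-random generators while still giving effectively unique module names. A tiny sketch:

import uuid

# the hex form of a UUID4 is 32 characters; the first 20 are ample for a unique name
module_name = f"test_module_{uuid.uuid4().hex[:20]}"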

View File

@@ -1,5 +1,5 @@
version = 1
revision = 2
revision = 3
requires-python = ">=3.9"
resolution-markers = [
"python_full_version >= '3.13'",
@@ -1090,7 +1090,7 @@ wheels = [
[[package]]
name = "langchain-core"
version = "0.3.72"
version = "0.3.74"
source = { editable = "../core" }
dependencies = [
{ name = "jsonpatch" },
@@ -1152,6 +1152,7 @@ version = "0.3.9"
source = { editable = "." }
dependencies = [
{ name = "langchain-core" },
{ name = "pip" },
]
[package.dev-dependencies]
@@ -1178,6 +1179,7 @@ test-integration = [
{ name = "sentence-transformers" },
{ name = "spacy" },
{ name = "thinc" },
{ name = "tiktoken" },
{ name = "transformers" },
]
typing = [
@@ -1188,7 +1190,10 @@ typing = [
]
[package.metadata]
requires-dist = [{ name = "langchain-core", editable = "../core" }]
requires-dist = [
{ name = "langchain-core", editable = "../core" },
{ name = "pip", specifier = ">=25.2" },
]
[package.metadata.requires-dev]
dev = [
@@ -1197,7 +1202,7 @@ dev = [
]
lint = [
{ name = "langchain-core", editable = "../core" },
{ name = "ruff", specifier = ">=0.12.2,<0.13" },
{ name = "ruff", specifier = ">=0.12.8,<0.13" },
]
test = [
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
@@ -1214,6 +1219,7 @@ test-integration = [
{ name = "sentence-transformers", specifier = ">=3.0.1" },
{ name = "spacy", specifier = ">=3.8.7,<4.0.0" },
{ name = "thinc", specifier = ">=8.3.6,<9.0.0" },
{ name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
{ name = "transformers", specifier = ">=4.51.3,<5.0.0" },
]
typing = [
@@ -2133,6 +2139,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/41/67/936f9814bdd74b2dfd4822f1f7725ab5d8ff4103919a1664eb4874c58b2f/pillow-11.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0", size = 2626353, upload-time = "2025-01-02T08:13:52.725Z" },
]
[[package]]
name = "pip"
version = "25.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/20/16/650289cd3f43d5a2fadfd98c68bd1e1e7f2550a1a5326768cddfbcedb2c5/pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2", size = 1840021, upload-time = "2025-07-30T21:50:15.401Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa/pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717", size = 1752557, upload-time = "2025-07-30T21:50:13.323Z" },
]
[[package]]
name = "platformdirs"
version = "4.3.6"
@@ -2919,27 +2934,27 @@ wheels = [
[[package]]
name = "ruff"
version = "0.12.2"
version = "0.12.8"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6c/3d/d9a195676f25d00dbfcf3cf95fdd4c685c497fcfa7e862a44ac5e4e96480/ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e", size = 4432239, upload-time = "2025-07-03T16:40:19.566Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4b/da/5bd7565be729e86e1442dad2c9a364ceeff82227c2dece7c29697a9795eb/ruff-0.12.8.tar.gz", hash = "sha256:4cb3a45525176e1009b2b64126acf5f9444ea59066262791febf55e40493a033", size = 5242373, upload-time = "2025-08-07T19:05:47.268Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/74/b6/2098d0126d2d3318fd5bec3ad40d06c25d377d95749f7a0c5af17129b3b1/ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be", size = 10369761, upload-time = "2025-07-03T16:39:38.847Z" },
{ url = "https://files.pythonhosted.org/packages/b1/4b/5da0142033dbe155dc598cfb99262d8ee2449d76920ea92c4eeb9547c208/ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e", size = 11155659, upload-time = "2025-07-03T16:39:42.294Z" },
{ url = "https://files.pythonhosted.org/packages/3e/21/967b82550a503d7c5c5c127d11c935344b35e8c521f52915fc858fb3e473/ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc", size = 10537769, upload-time = "2025-07-03T16:39:44.75Z" },
{ url = "https://files.pythonhosted.org/packages/33/91/00cff7102e2ec71a4890fb7ba1803f2cdb122d82787c7d7cf8041fe8cbc1/ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922", size = 10717602, upload-time = "2025-07-03T16:39:47.652Z" },
{ url = "https://files.pythonhosted.org/packages/9b/eb/928814daec4e1ba9115858adcda44a637fb9010618721937491e4e2283b8/ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b", size = 10198772, upload-time = "2025-07-03T16:39:49.641Z" },
{ url = "https://files.pythonhosted.org/packages/50/fa/f15089bc20c40f4f72334f9145dde55ab2b680e51afb3b55422effbf2fb6/ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d", size = 11845173, upload-time = "2025-07-03T16:39:52.069Z" },
{ url = "https://files.pythonhosted.org/packages/43/9f/1f6f98f39f2b9302acc161a4a2187b1e3a97634fe918a8e731e591841cf4/ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1", size = 12553002, upload-time = "2025-07-03T16:39:54.551Z" },
{ url = "https://files.pythonhosted.org/packages/d8/70/08991ac46e38ddd231c8f4fd05ef189b1b94be8883e8c0c146a025c20a19/ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4", size = 12171330, upload-time = "2025-07-03T16:39:57.55Z" },
{ url = "https://files.pythonhosted.org/packages/88/a9/5a55266fec474acfd0a1c73285f19dd22461d95a538f29bba02edd07a5d9/ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9", size = 11774717, upload-time = "2025-07-03T16:39:59.78Z" },
{ url = "https://files.pythonhosted.org/packages/87/e5/0c270e458fc73c46c0d0f7cf970bb14786e5fdb88c87b5e423a4bd65232b/ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da", size = 11646659, upload-time = "2025-07-03T16:40:01.934Z" },
{ url = "https://files.pythonhosted.org/packages/b7/b6/45ab96070c9752af37f0be364d849ed70e9ccede07675b0ec4e3ef76b63b/ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce", size = 10604012, upload-time = "2025-07-03T16:40:04.363Z" },
{ url = "https://files.pythonhosted.org/packages/86/91/26a6e6a424eb147cc7627eebae095cfa0b4b337a7c1c413c447c9ebb72fd/ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d", size = 10176799, upload-time = "2025-07-03T16:40:06.514Z" },
{ url = "https://files.pythonhosted.org/packages/f5/0c/9f344583465a61c8918a7cda604226e77b2c548daf8ef7c2bfccf2b37200/ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04", size = 11241507, upload-time = "2025-07-03T16:40:08.708Z" },
{ url = "https://files.pythonhosted.org/packages/1c/b7/99c34ded8fb5f86c0280278fa89a0066c3760edc326e935ce0b1550d315d/ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342", size = 11717609, upload-time = "2025-07-03T16:40:10.836Z" },
{ url = "https://files.pythonhosted.org/packages/51/de/8589fa724590faa057e5a6d171e7f2f6cffe3287406ef40e49c682c07d89/ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a", size = 10523823, upload-time = "2025-07-03T16:40:13.203Z" },
{ url = "https://files.pythonhosted.org/packages/94/47/8abf129102ae4c90cba0c2199a1a9b0fa896f6f806238d6f8c14448cc748/ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639", size = 11629831, upload-time = "2025-07-03T16:40:15.478Z" },
{ url = "https://files.pythonhosted.org/packages/e2/1f/72d2946e3cc7456bb837e88000eb3437e55f80db339c840c04015a11115d/ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12", size = 10735334, upload-time = "2025-07-03T16:40:17.677Z" },
{ url = "https://files.pythonhosted.org/packages/c9/1e/c843bfa8ad1114fab3eb2b78235dda76acd66384c663a4e0415ecc13aa1e/ruff-0.12.8-py3-none-linux_armv6l.whl", hash = "sha256:63cb5a5e933fc913e5823a0dfdc3c99add73f52d139d6cd5cc8639d0e0465513", size = 11675315, upload-time = "2025-08-07T19:05:06.15Z" },
{ url = "https://files.pythonhosted.org/packages/24/ee/af6e5c2a8ca3a81676d5480a1025494fd104b8896266502bb4de2a0e8388/ruff-0.12.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9a9bbe28f9f551accf84a24c366c1aa8774d6748438b47174f8e8565ab9dedbc", size = 12456653, upload-time = "2025-08-07T19:05:09.759Z" },
{ url = "https://files.pythonhosted.org/packages/99/9d/e91f84dfe3866fa648c10512904991ecc326fd0b66578b324ee6ecb8f725/ruff-0.12.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2fae54e752a3150f7ee0e09bce2e133caf10ce9d971510a9b925392dc98d2fec", size = 11659690, upload-time = "2025-08-07T19:05:12.551Z" },
{ url = "https://files.pythonhosted.org/packages/fe/ac/a363d25ec53040408ebdd4efcee929d48547665858ede0505d1d8041b2e5/ruff-0.12.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0acbcf01206df963d9331b5838fb31f3b44fa979ee7fa368b9b9057d89f4a53", size = 11896923, upload-time = "2025-08-07T19:05:14.821Z" },
{ url = "https://files.pythonhosted.org/packages/58/9f/ea356cd87c395f6ade9bb81365bd909ff60860975ca1bc39f0e59de3da37/ruff-0.12.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae3e7504666ad4c62f9ac8eedb52a93f9ebdeb34742b8b71cd3cccd24912719f", size = 11477612, upload-time = "2025-08-07T19:05:16.712Z" },
{ url = "https://files.pythonhosted.org/packages/1a/46/92e8fa3c9dcfd49175225c09053916cb97bb7204f9f899c2f2baca69e450/ruff-0.12.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb82efb5d35d07497813a1c5647867390a7d83304562607f3579602fa3d7d46f", size = 13182745, upload-time = "2025-08-07T19:05:18.709Z" },
{ url = "https://files.pythonhosted.org/packages/5e/c4/f2176a310f26e6160deaf661ef60db6c3bb62b7a35e57ae28f27a09a7d63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dbea798fc0065ad0b84a2947b0aff4233f0cb30f226f00a2c5850ca4393de609", size = 14206885, upload-time = "2025-08-07T19:05:21.025Z" },
{ url = "https://files.pythonhosted.org/packages/87/9d/98e162f3eeeb6689acbedbae5050b4b3220754554526c50c292b611d3a63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49ebcaccc2bdad86fd51b7864e3d808aad404aab8df33d469b6e65584656263a", size = 13639381, upload-time = "2025-08-07T19:05:23.423Z" },
{ url = "https://files.pythonhosted.org/packages/81/4e/1b7478b072fcde5161b48f64774d6edd59d6d198e4ba8918d9f4702b8043/ruff-0.12.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ac9c570634b98c71c88cb17badd90f13fc076a472ba6ef1d113d8ed3df109fb", size = 12613271, upload-time = "2025-08-07T19:05:25.507Z" },
{ url = "https://files.pythonhosted.org/packages/e8/67/0c3c9179a3ad19791ef1b8f7138aa27d4578c78700551c60d9260b2c660d/ruff-0.12.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:560e0cd641e45591a3e42cb50ef61ce07162b9c233786663fdce2d8557d99818", size = 12847783, upload-time = "2025-08-07T19:05:28.14Z" },
{ url = "https://files.pythonhosted.org/packages/4e/2a/0b6ac3dd045acf8aa229b12c9c17bb35508191b71a14904baf99573a21bd/ruff-0.12.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:71c83121512e7743fba5a8848c261dcc454cafb3ef2934a43f1b7a4eb5a447ea", size = 11702672, upload-time = "2025-08-07T19:05:30.413Z" },
{ url = "https://files.pythonhosted.org/packages/9d/ee/f9fdc9f341b0430110de8b39a6ee5fa68c5706dc7c0aa940817947d6937e/ruff-0.12.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:de4429ef2ba091ecddedd300f4c3f24bca875d3d8b23340728c3cb0da81072c3", size = 11440626, upload-time = "2025-08-07T19:05:32.492Z" },
{ url = "https://files.pythonhosted.org/packages/89/fb/b3aa2d482d05f44e4d197d1de5e3863feb13067b22c571b9561085c999dc/ruff-0.12.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a2cab5f60d5b65b50fba39a8950c8746df1627d54ba1197f970763917184b161", size = 12462162, upload-time = "2025-08-07T19:05:34.449Z" },
{ url = "https://files.pythonhosted.org/packages/18/9f/5c5d93e1d00d854d5013c96e1a92c33b703a0332707a7cdbd0a4880a84fb/ruff-0.12.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:45c32487e14f60b88aad6be9fd5da5093dbefb0e3e1224131cb1d441d7cb7d46", size = 12913212, upload-time = "2025-08-07T19:05:36.541Z" },
{ url = "https://files.pythonhosted.org/packages/71/13/ab9120add1c0e4604c71bfc2e4ef7d63bebece0cfe617013da289539cef8/ruff-0.12.8-py3-none-win32.whl", hash = "sha256:daf3475060a617fd5bc80638aeaf2f5937f10af3ec44464e280a9d2218e720d3", size = 11694382, upload-time = "2025-08-07T19:05:38.468Z" },
{ url = "https://files.pythonhosted.org/packages/f6/dc/a2873b7c5001c62f46266685863bee2888caf469d1edac84bf3242074be2/ruff-0.12.8-py3-none-win_amd64.whl", hash = "sha256:7209531f1a1fcfbe8e46bcd7ab30e2f43604d8ba1c49029bb420b103d0b5f76e", size = 12740482, upload-time = "2025-08-07T19:05:40.391Z" },
{ url = "https://files.pythonhosted.org/packages/cb/5c/799a1efb8b5abab56e8a9f2a0b72d12bd64bb55815e9476c7d0a2887d2f7/ruff-0.12.8-py3-none-win_arm64.whl", hash = "sha256:c90e1a334683ce41b0e7a04f41790c429bf5073b62c1ae701c9dc5b3d14f0749", size = 11884718, upload-time = "2025-08-07T19:05:42.866Z" },
]
[[package]]