1
0
mirror of https://github.com/hwchase17/langchain.git synced 2025-05-06 07:38:50 +00:00

core: Add ruff rules for Pylint PLC (Convention) and PLE (Errors)

See https://docs.astral.sh/ruff/rules/#pylint-pl
This commit is contained in:
Christophe Bornet 2025-04-02 16:58:03 +02:00 committed by GitHub
parent fe0fd9dd70
commit ccc3d32ec8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
16 changed files with 79 additions and 80 deletions
libs/core
langchain_core
pyproject.toml
tests/unit_tests
language_models/chat_models
output_parsers
runnables
uv.lock

View File

@@ -2244,9 +2244,6 @@ class AsyncCallbackManagerForChainGroup(AsyncCallbackManager):
T = TypeVar("T", CallbackManager, AsyncCallbackManager)
H = TypeVar("H", bound=BaseCallbackHandler, covariant=True)
def _configure(
callback_manager_cls: type[T],
inheritable_callbacks: Callbacks = None,

View File

@@ -11,8 +11,7 @@ from abc import ABC, abstractmethod
from typing import TYPE_CHECKING
# Re-export Blob and PathLike for backwards compatibility
from langchain_core.documents.base import Blob as Blob
from langchain_core.documents.base import PathLike as PathLike
from langchain_core.documents.base import Blob, PathLike
if TYPE_CHECKING:
from collections.abc import Iterable

View File

@@ -25,7 +25,7 @@ class RemoveMessage(BaseMessage):
msg = "RemoveMessage does not support 'content' field."
raise ValueError(msg)
return super().__init__("", id=id, **kwargs)
super().__init__("", id=id, **kwargs)
RemoveMessage.model_rebuild()

View File

@@ -8,7 +8,6 @@ from abc import abstractmethod
from collections import deque
from io import StringIO
from typing import TYPE_CHECKING, TypeVar, Union
from typing import Optional as Optional
from typing_extensions import override

View File

@@ -1,7 +1,5 @@
"""String output parser."""
from typing import Optional as Optional
from langchain_core.output_parsers.transform import BaseTransformOutputParser

View File

@@ -1,7 +1,6 @@
"""[DEPRECATED] Pipeline prompt template."""
from typing import Any
from typing import Optional as Optional
from pydantic import model_validator

View File

@@ -5,8 +5,12 @@ from __future__ import annotations
import enum
import threading
from abc import abstractmethod
from collections.abc import AsyncIterator, Iterator, Sequence
from collections.abc import Mapping as Mapping
from collections.abc import (
AsyncIterator,
Iterator,
Mapping, # noqa: F401 Needed by pydantic
Sequence,
)
from functools import wraps
from typing import (
TYPE_CHECKING,

View File

@@ -195,13 +195,13 @@ def draw_mermaid(
)
# Recursively add nested subgraphs
for nested_prefix in edge_groups:
for nested_prefix, edges_ in edge_groups.items():
if not nested_prefix.startswith(prefix + ":") or nested_prefix == prefix:
continue
# only go to first level subgraphs
if ":" in nested_prefix[len(prefix) + 1 :]:
continue
add_subgraph(edge_groups[nested_prefix], nested_prefix)
add_subgraph(edges_, nested_prefix)
if prefix and not self_loop:
mermaid_graph += "\tend\n"
@@ -210,20 +210,20 @@ def draw_mermaid(
add_subgraph(edge_groups.get("", []), "")
# Add remaining subgraphs with edges
for prefix in edge_groups:
for prefix, edges_ in edge_groups.items():
if ":" in prefix or prefix == "":
continue
add_subgraph(edge_groups[prefix], prefix)
add_subgraph(edges_, prefix)
seen_subgraphs.add(prefix)
# Add empty subgraphs (subgraphs with no internal edges)
if with_styles:
for prefix in subgraph_nodes:
for prefix, subgraph_node in subgraph_nodes.items():
if ":" not in prefix and prefix not in seen_subgraphs:
mermaid_graph += f"\tsubgraph {prefix}\n"
# Add nodes that belong to this subgraph
for key, node in subgraph_nodes[prefix].items():
for key, node in subgraph_node.items():
mermaid_graph += render_node(key, node)
mermaid_graph += "\tend\n"

View File

@@ -23,7 +23,7 @@ from typing import (
from typing_extensions import TypeGuard, override
# Re-export create-model for backwards compatibility
from langchain_core.utils.pydantic import create_model as create_model
from langchain_core.utils.pydantic import create_model # noqa: F401
if TYPE_CHECKING:
from collections.abc import (
@@ -38,9 +38,9 @@ if TYPE_CHECKING:
from langchain_core.runnables.schema import StreamEvent
Input = TypeVar("Input", contravariant=True)
Input = TypeVar("Input", contravariant=True) # noqa: PLC0105
# Output type should implement __concat__, as eg str, list, dict do
Output = TypeVar("Output", covariant=True)
Output = TypeVar("Output", covariant=True) # noqa: PLC0105
async def gated_coro(semaphore: asyncio.Semaphore, coro: Coroutine) -> Any:

View File

@@ -20,45 +20,51 @@ tool for the job.
from __future__ import annotations
from langchain_core.tools.base import (
FILTERED_ARGS as FILTERED_ARGS,
)
from langchain_core.tools.base import (
ArgsSchema as ArgsSchema,
)
from langchain_core.tools.base import (
BaseTool as BaseTool,
)
from langchain_core.tools.base import (
BaseToolkit as BaseToolkit,
)
from langchain_core.tools.base import (
InjectedToolArg as InjectedToolArg,
)
from langchain_core.tools.base import InjectedToolCallId as InjectedToolCallId
from langchain_core.tools.base import SchemaAnnotationError as SchemaAnnotationError
from langchain_core.tools.base import (
ToolException as ToolException,
)
from langchain_core.tools.base import (
_get_runnable_config_param as _get_runnable_config_param,
)
from langchain_core.tools.base import (
create_schema_from_function as create_schema_from_function,
FILTERED_ARGS,
ArgsSchema,
BaseTool,
BaseToolkit,
InjectedToolArg,
InjectedToolCallId,
SchemaAnnotationError,
ToolException,
_get_runnable_config_param,
create_schema_from_function,
)
from langchain_core.tools.convert import (
convert_runnable_to_tool as convert_runnable_to_tool,
)
from langchain_core.tools.convert import tool as tool
from langchain_core.tools.render import ToolsRenderer as ToolsRenderer
from langchain_core.tools.render import (
render_text_description as render_text_description,
convert_runnable_to_tool,
tool,
)
from langchain_core.tools.render import (
render_text_description_and_args as render_text_description_and_args,
ToolsRenderer,
render_text_description,
render_text_description_and_args,
)
from langchain_core.tools.retriever import RetrieverInput as RetrieverInput
from langchain_core.tools.retriever import (
create_retriever_tool as create_retriever_tool,
RetrieverInput,
create_retriever_tool,
)
from langchain_core.tools.simple import Tool as Tool
from langchain_core.tools.structured import StructuredTool as StructuredTool
from langchain_core.tools.simple import Tool
from langchain_core.tools.structured import StructuredTool
__all__ = [
"ArgsSchema",
"BaseTool",
"BaseToolkit",
"FILTERED_ARGS",
"SchemaAnnotationError",
"ToolException",
"InjectedToolArg",
"InjectedToolCallId",
"_get_runnable_config_param",
"create_schema_from_function",
"convert_runnable_to_tool",
"tool",
"ToolsRenderer",
"render_text_description",
"render_text_description_and_args",
"RetrieverInput",
"create_retriever_tool",
"Tool",
"StructuredTool",
]

View File

@@ -324,8 +324,8 @@ def _html_escape(string: str) -> str:
# & must be handled first
string = string.replace("&", "&amp;")
for char in html_codes:
string = string.replace(char, html_codes[char])
for char, code in html_codes.items():
string = string.replace(char, code)
return string

View File

@@ -101,8 +101,6 @@ ignore = [
"FBT002",
"FIX",
"PGH",
"PLC",
"PLE",
"PLR",
"PYI",
"RET",

View File

@@ -1,5 +1,4 @@
import time
from typing import Optional as Optional
import pytest
from blockbuster import BlockBuster

View File

@@ -1,7 +1,5 @@
"""Module to test base parser implementations."""
from typing import Optional as Optional
from langchain_core.exceptions import OutputParserException
from langchain_core.language_models import GenericFakeChatModel
from langchain_core.messages import AIMessage

View File

@@ -5,7 +5,6 @@ import sys
from collections.abc import AsyncIterator, Sequence
from itertools import cycle
from typing import Any, cast
from typing import Optional as Optional
import pytest
from pydantic import BaseModel

35
uv.lock
View File

@@ -2153,7 +2153,7 @@ wheels = [
[[package]]
name = "langchain"
version = "0.3.20"
version = "0.3.22"
source = { editable = "libs/langchain" }
dependencies = [
{ name = "async-timeout", marker = "python_full_version < '3.11'" },
@@ -2171,6 +2171,7 @@ requires-dist = [
{ name = "async-timeout", marker = "python_full_version < '3.11'", specifier = ">=4.0.0,<5.0.0" },
{ name = "langchain-anthropic", marker = "extra == 'anthropic'" },
{ name = "langchain-aws", marker = "extra == 'aws'" },
{ name = "langchain-azure-ai", marker = "extra == 'azure-ai'" },
{ name = "langchain-cohere", marker = "extra == 'cohere'" },
{ name = "langchain-community", marker = "extra == 'community'" },
{ name = "langchain-core", editable = "libs/core" },
@@ -2192,7 +2193,7 @@ requires-dist = [
{ name = "requests", specifier = ">=2,<3" },
{ name = "sqlalchemy", specifier = ">=1.4,<3" },
]
provides-extras = ["community", "anthropic", "openai", "cohere", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai"]
provides-extras = ["community", "anthropic", "openai", "azure-ai", "cohere", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai"]
[package.metadata.requires-dev]
codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }]
@@ -2261,7 +2262,7 @@ typing = [
[[package]]
name = "langchain-anthropic"
version = "0.3.9"
version = "0.3.10"
source = { editable = "libs/partners/anthropic" }
dependencies = [
{ name = "anthropic" },
@@ -2271,7 +2272,7 @@ dependencies = [
[package.metadata]
requires-dist = [
{ name = "anthropic", specifier = ">=0.47.0,<1" },
{ name = "anthropic", specifier = ">=0.49.0,<1" },
{ name = "langchain-core", editable = "libs/core" },
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
]
@@ -2288,6 +2289,7 @@ test = [
{ name = "pytest", specifier = ">=7.3.0,<8.0.0" },
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
{ name = "pytest-retry", specifier = ">=1.7.0,<1.8.0" },
{ name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" },
{ name = "pytest-timeout", specifier = ">=2.3.1,<3.0.0" },
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
@@ -2362,7 +2364,7 @@ typing = [
[[package]]
name = "langchain-community"
version = "0.3.19"
version = "0.3.20"
source = { editable = "libs/community" }
dependencies = [
{ name = "aiohttp" },
@@ -2451,7 +2453,7 @@ typing = [
[[package]]
name = "langchain-core"
version = "0.3.43"
version = "0.3.49"
source = { editable = "libs/core" }
dependencies = [
{ name = "jsonpatch" },
@@ -2481,7 +2483,7 @@ dev = [
{ name = "jupyter", specifier = ">=1.0.0,<2.0.0" },
{ name = "setuptools", specifier = ">=67.6.1,<68.0.0" },
]
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
lint = [{ name = "ruff", specifier = ">=0.11.2,<0.12.0" }]
test = [
{ name = "blockbuster", specifier = "~=1.5.18" },
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
@@ -2518,7 +2520,7 @@ dependencies = [
[[package]]
name = "langchain-fireworks"
version = "0.2.7"
version = "0.2.9"
source = { editable = "libs/partners/fireworks" }
dependencies = [
{ name = "aiohttp" },
@@ -2574,7 +2576,7 @@ dependencies = [
[[package]]
name = "langchain-groq"
version = "0.2.5"
version = "0.3.2"
source = { editable = "libs/partners/groq" }
dependencies = [
{ name = "groq" },
@@ -2607,7 +2609,7 @@ typing = [
[[package]]
name = "langchain-mistralai"
version = "0.2.7"
version = "0.2.10"
source = { editable = "libs/partners/mistralai" }
dependencies = [
{ name = "httpx" },
@@ -2733,7 +2735,7 @@ typing = []
[[package]]
name = "langchain-openai"
version = "0.3.8"
version = "0.3.11"
source = { editable = "libs/partners/openai" }
dependencies = [
{ name = "langchain-core" },
@@ -2744,7 +2746,7 @@ dependencies = [
[package.metadata]
requires-dist = [
{ name = "langchain-core", editable = "libs/core" },
{ name = "openai", specifier = ">=1.66.0,<2.0.0" },
{ name = "openai", specifier = ">=1.68.2,<2.0.0" },
{ name = "tiktoken", specifier = ">=0.7,<1" },
]
@@ -2762,6 +2764,7 @@ test = [
{ name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
{ name = "pytest-cov", specifier = ">=4.1.0,<5.0.0" },
{ name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" },
{ name = "pytest-retry", specifier = ">=1.7.0,<1.8.0" },
{ name = "pytest-socket", specifier = ">=0.6.0,<1.0.0" },
{ name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" },
{ name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
@@ -2781,7 +2784,7 @@ typing = [
[[package]]
name = "langchain-text-splitters"
version = "0.3.6"
version = "0.3.7"
source = { editable = "libs/text-splitters" }
dependencies = [
{ name = "langchain-core" },
@@ -3631,7 +3634,7 @@ wheels = [
[[package]]
name = "openai"
version = "1.66.2"
version = "1.70.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -3643,9 +3646,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d8/e1/b3e1fda1aa32d4f40d4de744e91de4de65c854c3e53c63342e4b5f9c5995/openai-1.66.2.tar.gz", hash = "sha256:9b3a843c25f81ee09b6469d483d9fba779d5c6ea41861180772f043481b0598d", size = 397041 }
sdist = { url = "https://files.pythonhosted.org/packages/87/f5/ae0f3cd226c2993b4ac1cc4b5f6ca099764689f403c14922c9356accec66/openai-1.70.0.tar.gz", hash = "sha256:e52a8d54c3efeb08cf58539b5b21a5abef25368b5432965e4de88cdf4e091b2b", size = 409640 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/6f/3315b3583ffe3e31c55b446cb22d2a7c235e65ca191674fffae62deb3c11/openai-1.66.2-py3-none-any.whl", hash = "sha256:75194057ee6bb8b732526387b6041327a05656d976fc21c064e21c8ac6b07999", size = 567268 },
{ url = "https://files.pythonhosted.org/packages/e2/39/c4b38317d2c702c4bc763957735aaeaf30dfc43b5b824121c49a4ba7ba0f/openai-1.70.0-py3-none-any.whl", hash = "sha256:f6438d053fd8b2e05fd6bef70871e832d9bbdf55e119d0ac5b92726f1ae6f614", size = 599070 },
]
[[package]]