core: Add ruff rules for PIE (#26939)

All auto-fixes.
Christophe Bornet, 2024-09-27 18:08:35 +02:00 (committed by GitHub)
commit f4e738bb40, parent 836c2a4ae0
21 changed files with 16 additions and 52 deletions


@@ -112,7 +112,6 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
             action (AgentAction): The agent action.
             **kwargs (Any): Additional keyword arguments.
         """
-        pass
 
     def on_tool_end(self, output: Any, **kwargs: Any) -> None:
         """Run when tool ends running.


@@ -357,7 +357,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         stop: Optional[list[str]] = None,
         **kwargs: Any,
     ) -> Iterator[BaseMessageChunk]:
-        if not self._should_stream(async_api=False, **{**kwargs, **{"stream": True}}):
+        if not self._should_stream(async_api=False, **{**kwargs, "stream": True}):
             # model doesn't implement streaming, so use default implementation
             yield cast(
                 BaseMessageChunk, self.invoke(input, config=config, stop=stop, **kwargs)
@@ -427,7 +427,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         stop: Optional[list[str]] = None,
         **kwargs: Any,
     ) -> AsyncIterator[BaseMessageChunk]:
-        if not self._should_stream(async_api=True, **{**kwargs, **{"stream": True}}):
+        if not self._should_stream(async_api=True, **{**kwargs, "stream": True}):
             # No async or sync stream is implemented, so fall back to ainvoke
             yield cast(
                 BaseMessageChunk,
@@ -550,7 +550,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
     def _get_llm_string(self, stop: Optional[list[str]] = None, **kwargs: Any) -> str:
         if self.is_lc_serializable():
-            params = {**kwargs, **{"stop": stop}}
+            params = {**kwargs, "stop": stop}
             param_string = str(sorted(params.items()))
             # This code is not super efficient as it goes back and forth between
             # json and dict.
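The three chat-model edits above rely on PIE800: spreading a dict literal inside another dict literal is equivalent to writing its keys inline. A quick check with made-up kwargs:

kwargs = {"temperature": 0.2}

# The nested spread (old form) and the inline key (fixed form) build
# identical dicts, so the call receives the same arguments either way.
assert {**kwargs, **{"stream": True}} == {**kwargs, "stream": True} == {
    "temperature": 0.2,
    "stream": True,
}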


@@ -1007,11 +1007,9 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
             input_vars.update(_message.input_variables)
 
         kwargs = {
-            **{
-                "input_variables": sorted(input_vars),
-                "optional_variables": sorted(optional_variables),
-                "partial_variables": partial_vars,
-            },
+            "input_variables": sorted(input_vars),
+            "optional_variables": sorted(optional_variables),
+            "partial_variables": partial_vars,
             **kwargs,
         }
         cast(type[ChatPromptTemplate], super()).__init__(messages=_messages, **kwargs)
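Dropping the inner literal above (also PIE800) keeps the merge semantics: keys listed before **kwargs act as defaults, and a caller-supplied key still overrides them because the rightmost occurrence of a key wins. A small illustration with made-up values:

caller_kwargs = {"input_variables": ["question"]}

# Defaults first, caller overrides last; the result is the same with or
# without wrapping the defaults in their own dict literal.
merged = {"input_variables": [], "partial_variables": {}, **caller_kwargs}
assert merged == {"input_variables": ["question"], "partial_variables": {}}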


@@ -36,8 +36,6 @@ else:
 class EmptyDict(TypedDict, total=False):
     """Empty dict type."""
-
-    pass
 
 
 class RunnableConfig(TypedDict, total=False):
     """Configuration for a Runnable."""


@@ -457,8 +457,6 @@ RunnableConfigurableFields.model_rebuild()
 class StrEnum(str, enum.Enum):
     """String enum."""
-
-    pass
 
 
 _enums_for_spec: WeakValueDictionary[
     Union[


@@ -619,7 +619,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
         return self.__class__(
             **{
                 **self.model_dump(),
-                **{"runnable": new_runnable, "fallbacks": new_fallbacks},
+                "runnable": new_runnable,
+                "fallbacks": new_fallbacks,
             }
         )


@@ -320,8 +320,6 @@ class ToolException(Exception):  # noqa: N818
     to the agent as observation, and printed in red on the console.
     """
-
-    pass
 
 
 class BaseTool(RunnableSerializable[Union[str, dict, ToolCall], Any]):
     """Interface LangChain tools must implement."""


@@ -850,7 +850,6 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
 
     async def _on_run_create(self, run: Run) -> None:
         """Process a run upon creation."""
-        pass
 
     async def _on_run_update(self, run: Run) -> None:
         """Process a run upon update."""


@@ -829,7 +829,6 @@ async def _astream_events_implementation_v1(
                 inputs = log_entry["inputs"]
                 if inputs is not None:
                     data["input"] = inputs
-                pass
 
             if event_type == "end":
                 inputs = log_entry["inputs"]


@@ -561,7 +561,7 @@ def _parse_google_docstring(
             if block.startswith("Args:"):
                 args_block = block
                 break
-            elif block.startswith("Returns:") or block.startswith("Example:"):
+            elif block.startswith(("Returns:", "Example:")):
                 # Don't break in case Args come after
                 past_descriptors = True
             elif not past_descriptors:
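The rewrite above works because str.startswith accepts a tuple of prefixes and returns True if any of them matches, so the chained or is redundant; that is the PIE810 fix. For instance:

# One call with a tuple of prefixes replaces two chained startswith calls.
assert "Returns: the parsed value".startswith(("Returns:", "Example:"))
assert not "Args: x".startswith(("Returns:", "Example:"))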


@@ -32,8 +32,6 @@ _LAST_TAG_LINE = None
 class ChevronError(SyntaxError):
     """Custom exception for Chevron errors."""
-
-    pass
 
 
 #
 # Helper functions


@@ -268,8 +268,6 @@ def convert_to_secret_str(value: Union[SecretStr, str]) -> SecretStr:
 
 class _NoDefaultType:
     """Type to indicate no default value is provided."""
-
-    pass
 
 
 _NoDefault = _NoDefaultType()


@@ -44,7 +44,7 @@ python = ">=3.12.4"
 [tool.poetry.extras]
 
 [tool.ruff.lint]
-select = [ "B", "C4", "E", "F", "I", "N", "T201", "UP",]
+select = [ "B", "C4", "E", "F", "I", "N", "PIE", "T201", "UP",]
 ignore = [ "UP007",]
 
 [tool.coverage.run]


@@ -4,4 +4,3 @@ import pytest
 @pytest.mark.compile
 def test_placeholder() -> None:
     """Used for compiling integration tests without running any real tests."""
-    pass


@@ -68,7 +68,6 @@ async def beta_async_function() -> str:
 class ClassWithBetaMethods:
     def __init__(self) -> None:
         """original doc"""
-        pass
 
     @beta()
     def beta_method(self) -> str:
@@ -244,7 +243,6 @@ def test_whole_class_beta() -> None:
     class BetaClass:
         def __init__(self) -> None:
             """original doc"""
-            pass
 
         @beta()
         def beta_method(self) -> str:


@@ -88,7 +88,6 @@ async def deprecated_async_function() -> str:
 class ClassWithDeprecatedMethods:
     def __init__(self) -> None:
         """original doc"""
-        pass
 
     @deprecated(since="2.0.0", removal="3.0.0")
     def deprecated_method(self) -> str:
@@ -268,7 +267,6 @@ def test_whole_class_deprecation() -> None:
     class DeprecatedClass:
         def __init__(self) -> None:
             """original doc"""
-            pass
 
         @deprecated(since="2.0.0", removal="3.0.0")
         def deprecated_method(self) -> str:
@@ -311,7 +309,6 @@ def test_whole_class_inherited_deprecation() -> None:
     class DeprecatedClass:
         def __init__(self) -> None:
             """original doc"""
-            pass
 
         @deprecated(since="2.0.0", removal="3.0.0")
         def deprecated_method(self) -> str:
@@ -324,7 +321,6 @@ def test_whole_class_inherited_deprecation() -> None:
         def __init__(self) -> None:
             """original doc"""
-            pass
 
         @deprecated(since="2.2.0", removal="3.2.0")
         def deprecated_method(self) -> str:


@@ -107,7 +107,7 @@ async def test_stream_error_callback() -> None:
         else:
             assert llm_result.generations[0][0].text == message[:i]
 
-    for i in range(0, 2):
+    for i in range(2):
         llm = FakeListChatModel(
             responses=[message],
             error_on_chunk_number=i,


@@ -105,7 +105,7 @@ async def test_stream_error_callback() -> None:
         else:
             assert llm_result.generations[0][0].text == message[:i]
 
-    for i in range(0, 2):
+    for i in range(2):
         llm = FakeStreamingListLLM(
             responses=[message],
             error_on_chunk_number=i,
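Both test loops above change because range starts at 0 by default, so range(0, 2) and range(2) produce the same indices; this is the PIE808 fix:

# A start of 0 is the default and can be dropped.
assert list(range(0, 2)) == list(range(2)) == [0, 1]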


@@ -429,16 +429,12 @@ def test_message_chunk_to_message() -> None:
     expected = AIMessage(
         content="I am",
         tool_calls=[
-            create_tool_call(**{"name": "tool1", "args": {"a": 1}, "id": "1"}),  # type: ignore[arg-type]
-            create_tool_call(**{"name": "tool2", "args": {}, "id": "2"}),  # type: ignore[arg-type]
+            create_tool_call(name="tool1", args={"a": 1}, id="1"),  # type: ignore[arg-type]
+            create_tool_call(name="tool2", args={}, id="2"),  # type: ignore[arg-type]
         ],
         invalid_tool_calls=[
-            create_invalid_tool_call(
-                **{"name": "tool3", "args": None, "id": "3", "error": None}
-            ),
-            create_invalid_tool_call(
-                **{"name": "tool4", "args": "abc", "id": "4", "error": None}
-            ),
+            create_invalid_tool_call(name="tool3", args=None, id="3", error=None),
+            create_invalid_tool_call(name="tool4", args="abc", id="4", error=None),
         ],
     )
     assert message_chunk_to_message(chunk) == expected
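Unpacking a dict literal of plain string keys into a call passes exactly the same keyword arguments as writing them directly, which is the fix applied to the tool-call constructors above (PIE804). A sketch with a stand-in helper, not the real langchain_core factory:

def make_tool_call(name: str, args: object, id: str) -> dict:
    # Stand-in for illustration only; just records its keyword arguments.
    return {"name": name, "args": args, "id": id}


old_style = make_tool_call(**{"name": "tool1", "args": {"a": 1}, "id": "1"})
new_style = make_tool_call(name="tool1", args={"a": 1}, id="1")
assert old_style == new_style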


@@ -357,7 +357,6 @@ def test_structured_tool_types_parsed_pydantic_mixed() -> None:
         some_base_model: SomeBaseModel, another_base_model: AnotherBaseModel
     ) -> None:
         """Return the arguments directly."""
-        pass
 
 
 def test_base_tool_inheritance_base_schema() -> None:


@@ -54,7 +54,6 @@ def annotated_function() -> Callable:
         arg2: ExtensionsAnnotated[Literal["bar", "baz"], "one of 'bar', 'baz'"],
     ) -> None:
         """dummy function"""
-        pass
 
     return dummy_function
@@ -68,7 +67,6 @@ def function() -> Callable:
             arg1: foo
             arg2: one of 'bar', 'baz'
         """
-        pass
 
     return dummy_function
@@ -220,7 +218,6 @@ class Dummy:
             arg1: foo
             arg2: one of 'bar', 'baz'
         """
-        pass
 
 
 class DummyWithClassMethod:
@@ -232,7 +229,6 @@ class DummyWithClassMethod:
             arg1: foo
             arg2: one of 'bar', 'baz'
         """
-        pass
 
 
 def test_convert_to_openai_function(
@@ -334,7 +330,6 @@ def test_convert_to_openai_function_nested_v2() -> None:
     def my_function(arg1: NestedV2) -> None:
         """dummy function"""
-        pass
 
     convert_to_openai_function(my_function)
@@ -348,7 +343,6 @@ def test_convert_to_openai_function_nested() -> None:
     def my_function(arg1: Nested) -> None:
         """dummy function"""
-        pass
 
     expected = {
         "name": "my_function",
@@ -386,7 +380,6 @@ def test_convert_to_openai_function_nested_strict() -> None:
     def my_function(arg1: Nested) -> None:
         """dummy function"""
-        pass
 
     expected = {
         "name": "my_function",
@@ -429,7 +422,6 @@ def test_function_optional_param() -> None:
         c: Optional[list[Optional[str]]],
     ) -> None:
         """A test function"""
-        pass
 
     func = convert_to_openai_function(func5)
     req = func["parameters"]["required"]
@@ -439,7 +431,6 @@ def test_function_optional_param() -> None:
 def test_function_no_params() -> None:
     def nullary_function() -> None:
         """nullary function"""
-        pass
 
     func = convert_to_openai_function(nullary_function)
     req = func["parameters"].get("required")
@@ -781,7 +772,6 @@ def test_convert_union_type_py_39() -> None:
     @tool
     def magic_function(input: int | float) -> str:
         """Compute a magic function."""
-        pass
 
     result = convert_to_openai_function(magic_function)
     assert result["parameters"]["properties"]["input"] == {