Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-16 15:04:13 +00:00)
core: Add ruff rule FBT003 (boolean-trap) (#29424)
See https://docs.astral.sh/ruff/rules/boolean-positional-value-in-call/#boolean-positional-value-in-call-fbt003

This PR also fixes some FBT001/FBT002 violations in private methods, but does not enforce those rules globally for now.

Co-authored-by: Eugene Yurtsev <eyurtsev@gmail.com>
Committed by: GitHub
Parent: 4f8ea13cea
Commit: 558191198f
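For context on the change pattern in the diff below: ruff's boolean-trap rules flag boolean arguments whose meaning is invisible at the call site. FBT001/FBT002 fire on a boolean positional parameter and its default in a function definition, and FBT003 fires on passing a bare True/False positionally in a call. The sketch below is a minimal illustration with made-up names, not code from this commit; it shows the same fix the diff applies, making the flag keyword-only with a bare `*` so every caller has to name it.

# Minimal illustration of the boolean trap (hypothetical names, not from this commit).

def fetch_page(url: str, verbose: bool = False) -> str:  # FBT001/FBT002: boolean positional parameter
    return f"fetched {url} (verbose={verbose})"

fetch_page("https://example.com", True)  # FBT003: the bare True tells the reader nothing

# The fix applied throughout this PR: a bare `*` makes the flag keyword-only,
# so call sites must spell out what the boolean controls.

def fetch_page_fixed(url: str, *, verbose: bool = False) -> str:
    return f"fetched {url} (verbose={verbose})"

fetch_page_fixed("https://example.com", verbose=True)  # self-documenting call site

Callers inside the test suite change accordingly, e.g. `_get_iter(use_tool_calls)` becomes `_get_iter(use_tool_calls=use_tool_calls)`.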
@@ -41,7 +41,7 @@ async def test_inline_handlers_share_parent_context() -> None:
         called.
         """
 
-    def __init__(self, run_inline: bool) -> None:
+    def __init__(self, *, run_inline: bool) -> None:
         """Initialize the handler."""
         self.run_inline = run_inline
 
@@ -91,7 +91,7 @@ async def test_inline_handlers_share_parent_context_multiple() -> None:
     counter_var.reset(token)
 
 
 class StatefulAsyncCallbackHandler(AsyncCallbackHandler):
-    def __init__(self, name: str, run_inline: bool = True):
+    def __init__(self, name: str, *, run_inline: bool = True):
         self.name = name
         self.run_inline = run_inline
@@ -362,7 +362,7 @@ EXPECTED_STREAMED_JSON = [
 ]
 
 
-def _get_iter(use_tool_calls: bool = False) -> Any:
+def _get_iter(*, use_tool_calls: bool = False) -> Any:
     if use_tool_calls:
         list_to_iter = STREAMED_MESSAGES_WITH_TOOL_CALLS
     else:
@@ -374,7 +374,7 @@ def _get_iter(use_tool_calls: bool = False) -> Any:
     return input_iter
 
 
-def _get_aiter(use_tool_calls: bool = False) -> Any:
+def _get_aiter(*, use_tool_calls: bool = False) -> Any:
     if use_tool_calls:
         list_to_iter = STREAMED_MESSAGES_WITH_TOOL_CALLS
     else:
@@ -389,7 +389,7 @@ def _get_aiter(use_tool_calls: bool = False) -> Any:
 
 def test_partial_json_output_parser() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_iter(use_tool_calls)
+        input_iter = _get_iter(use_tool_calls=use_tool_calls)
         chain = input_iter | JsonOutputToolsParser()
 
         actual = list(chain.stream(None))
@@ -402,7 +402,7 @@ def test_partial_json_output_parser() -> None:
 
 async def test_partial_json_output_parser_async() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_aiter(use_tool_calls)
+        input_iter = _get_aiter(use_tool_calls=use_tool_calls)
         chain = input_iter | JsonOutputToolsParser()
 
         actual = [p async for p in chain.astream(None)]
@@ -415,7 +415,7 @@ async def test_partial_json_output_parser_async() -> None:
 
 def test_partial_json_output_parser_return_id() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_iter(use_tool_calls)
+        input_iter = _get_iter(use_tool_calls=use_tool_calls)
         chain = input_iter | JsonOutputToolsParser(return_id=True)
 
         actual = list(chain.stream(None))
@@ -434,7 +434,7 @@ def test_partial_json_output_parser_return_id() -> None:
 
 def test_partial_json_output_key_parser() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_iter(use_tool_calls)
+        input_iter = _get_iter(use_tool_calls=use_tool_calls)
         chain = input_iter | JsonOutputKeyToolsParser(key_name="NameCollector")
 
         actual = list(chain.stream(None))
@@ -444,7 +444,7 @@ def test_partial_json_output_key_parser() -> None:
 
 async def test_partial_json_output_parser_key_async() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_aiter(use_tool_calls)
+        input_iter = _get_aiter(use_tool_calls=use_tool_calls)
 
         chain = input_iter | JsonOutputKeyToolsParser(key_name="NameCollector")
 
@@ -455,7 +455,7 @@ async def test_partial_json_output_parser_key_async() -> None:
 
 def test_partial_json_output_key_parser_first_only() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_iter(use_tool_calls)
+        input_iter = _get_iter(use_tool_calls=use_tool_calls)
 
         chain = input_iter | JsonOutputKeyToolsParser(
             key_name="NameCollector", first_tool_only=True
@@ -466,7 +466,7 @@ def test_partial_json_output_key_parser_first_only() -> None:
 
 async def test_partial_json_output_parser_key_async_first_only() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_aiter(use_tool_calls)
+        input_iter = _get_aiter(use_tool_calls=use_tool_calls)
 
         chain = input_iter | JsonOutputKeyToolsParser(
             key_name="NameCollector", first_tool_only=True
@@ -507,7 +507,7 @@ EXPECTED_STREAMED_PYDANTIC = [
 
 def test_partial_pydantic_output_parser() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_iter(use_tool_calls)
+        input_iter = _get_iter(use_tool_calls=use_tool_calls)
 
         chain = input_iter | PydanticToolsParser(
             tools=[NameCollector], first_tool_only=True
@@ -519,7 +519,7 @@ def test_partial_pydantic_output_parser() -> None:
 
 async def test_partial_pydantic_output_parser_async() -> None:
     for use_tool_calls in [False, True]:
-        input_iter = _get_aiter(use_tool_calls)
+        input_iter = _get_aiter(use_tool_calls=use_tool_calls)
 
         chain = input_iter | PydanticToolsParser(
             tools=[NameCollector], first_tool_only=True
@@ -80,7 +80,7 @@ async def _as_async_iterator(iterable: list) -> AsyncIterator:
 
 
 async def _collect_events(
-    events: AsyncIterator[StreamEvent], with_nulled_ids: bool = True
+    events: AsyncIterator[StreamEvent], *, with_nulled_ids: bool = True
 ) -> list[StreamEvent]:
     """Collect the events and remove the run ids."""
     materialized_events = [event async for event in events]
@@ -221,7 +221,7 @@ async def test_config_traceable_async_handoff() -> None:
 @pytest.mark.parametrize("enabled", [None, True, False])
 @pytest.mark.parametrize("env", ["", "true"])
 def test_tracing_enable_disable(
-    mock_get_client: MagicMock, enabled: bool, env: str
+    mock_get_client: MagicMock, *, enabled: bool, env: str
 ) -> None:
     mock_session = MagicMock()
     mock_client_ = Client(
@@ -2002,7 +2002,7 @@ invalid_tool_result_blocks = [
         *([[block, False] for block in invalid_tool_result_blocks]),
     ],
 )
-def test__is_message_content_block(obj: Any, expected: bool) -> None:
+def test__is_message_content_block(obj: Any, *, expected: bool) -> None:
     assert _is_message_content_block(obj) is expected
 
 
@@ -2014,13 +2014,13 @@ def test__is_message_content_block(obj: Any, expected: bool) -> None:
         (invalid_tool_result_blocks, False),
     ],
 )
-def test__is_message_content_type(obj: Any, expected: bool) -> None:
+def test__is_message_content_type(obj: Any, *, expected: bool) -> None:
     assert _is_message_content_type(obj) is expected
 
 
 @pytest.mark.skipif(PYDANTIC_MAJOR_VERSION != 2, reason="Testing pydantic v2.")
 @pytest.mark.parametrize("use_v1_namespace", [True, False])
-def test__get_all_basemodel_annotations_v2(use_v1_namespace: bool) -> None:
+def test__get_all_basemodel_annotations_v2(*, use_v1_namespace: bool) -> None:
     A = TypeVar("A")
 
     if use_v1_namespace:
@@ -741,7 +741,7 @@ def test_tool_outputs() -> None:
 @pytest.mark.parametrize("use_extension_typed_dict", [True, False])
 @pytest.mark.parametrize("use_extension_annotated", [True, False])
 def test__convert_typed_dict_to_openai_function(
-    use_extension_typed_dict: bool, use_extension_annotated: bool
+    *, use_extension_typed_dict: bool, use_extension_annotated: bool
 ) -> None:
     typed_dict = ExtensionsTypedDict if use_extension_typed_dict else TypingTypedDict
     annotated = TypingAnnotated if use_extension_annotated else TypingAnnotated