From 4ef1aee9d87f0f36cdc1eab4b81083c910607225 Mon Sep 17 00:00:00 2001
From: keenborder786 <21110290@lums.edu.pk>
Date: Fri, 11 Jul 2025 06:08:53 +0500
Subject: [PATCH] fix: lint

---
 .../anthropic/tests/unit_tests/test_chat_models.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/libs/partners/anthropic/tests/unit_tests/test_chat_models.py b/libs/partners/anthropic/tests/unit_tests/test_chat_models.py
index a6499bce3cb..3ffc1bf02d6 100644
--- a/libs/partners/anthropic/tests/unit_tests/test_chat_models.py
+++ b/libs/partners/anthropic/tests/unit_tests/test_chat_models.py
@@ -93,13 +93,18 @@ def test_validate_max_tokens() -> None:
     assert llm.max_tokens == 1024
 
     # Test that existing max_tokens values are preserved
-    llm = ChatAnthropic(model="claude-3-5-sonnet-latest", max_tokens=2048, anthropic_api_key="test")
+    llm = ChatAnthropic(
+        model="claude-3-5-sonnet-latest", max_tokens=2048, anthropic_api_key="test"
+    )
     assert llm.max_tokens == 2048
 
     # Test that explicitly set max_tokens values are preserved
-    llm = ChatAnthropic(model="claude-3-5-sonnet-latest", max_tokens=4096, anthropic_api_key="test")
+    llm = ChatAnthropic(
+        model="claude-3-5-sonnet-latest", max_tokens=4096, anthropic_api_key="test"
+    )
     assert llm.max_tokens == 4096
 
+
 @pytest.mark.requires("anthropic")
 def test_anthropic_model_name_param() -> None:
     llm = ChatAnthropic(model_name="foo")  # type: ignore[call-arg, call-arg]
@@ -142,7 +147,6 @@ def test_anthropic_initialization() -> None:
     ChatAnthropic(model="test", anthropic_api_key="test")  # type: ignore[call-arg, call-arg]
 
 
-
 def test__format_output() -> None:
     anthropic_msg = Message(
         id="foo",