From 8b52e514633b9f6d5a809b1a4a58cec82a53e62b Mon Sep 17 00:00:00 2001 From: "open-swe[bot]" Date: Mon, 11 Aug 2025 20:51:29 +0000 Subject: [PATCH] Apply patch [skip ci] --- .../openai/langchain_openai/chat_models/base.py | 1 - .../chat_models/test_batch_integration.py | 12 ++++++------ .../tests/unit_tests/chat_models/test_batch.py | 7 +++---- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index 6137f232f47..5e4f016b10c 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -2280,7 +2280,6 @@ class BaseChatOpenAI(BaseChatModel): config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None, *, return_exceptions: bool = False, - use_batch_api: bool = False, **kwargs: Any, ) -> list[BaseMessage]: """ diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_batch_integration.py b/libs/partners/openai/tests/integration_tests/chat_models/test_batch_integration.py index baf91340b28..876567da2d6 100644 --- a/libs/partners/openai/tests/integration_tests/chat_models/test_batch_integration.py +++ b/libs/partners/openai/tests/integration_tests/chat_models/test_batch_integration.py @@ -74,8 +74,8 @@ class TestBatchAPIIntegration: ) # Check that we got reasonable responses - response1 = results[0].generations[0].message.content.strip() - response2 = results[1].generations[0].message.content.strip() + response1 = str(results[0].generations[0].message.content).strip() + response2 = str(results[1].generations[0].message.content).strip() # Basic sanity checks (responses should contain expected content) assert "4" in response1 or "four" in response1.lower() @@ -182,7 +182,7 @@ class TestBatchAPIIntegration: ) assert len(results) == 1 - result_content = results[0].generations[0].message.content.strip() + result_content = str(results[0].generations[0].message.content).strip()
 # Should contain the answer assert "30" in result_content or "thirty" in result_content.lower() @@ -263,7 +263,7 @@ class TestBatchAPIIntegration: results = self.llm.batch_retrieve(batch_id, timeout=1800.0) assert len(results) == 1 - result_content = results[0].generations[0].message.content.strip().lower() + result_content = str(results[0].generations[0].message.content).strip().lower() assert "test successful" in result_content @@ -272,7 +272,7 @@ class TestBatchAPIEdgeCases: def setup_method(self) -> None: """Set up test fixtures.""" - self.llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.1, max_tokens=50) + self.llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.1) @pytest.mark.scheduled def test_batch_with_very_short_timeout(self) -> None: @@ -358,7 +358,7 @@ class TestBatchAPIPerformance: # Check that we got reasonable math answers for i, result in enumerate(results, 1): - content = result.generations[0].message.content.strip() + content = str(result.generations[0].message.content).strip() expected_answer = str(i + i) assert expected_answer in content or str(i * 2) in content diff --git a/libs/partners/openai/tests/unit_tests/chat_models/test_batch.py b/libs/partners/openai/tests/unit_tests/chat_models/test_batch.py index 7c988b04d9d..47f1b03a755 100644 --- a/libs/partners/openai/tests/unit_tests/chat_models/test_batch.py +++ b/libs/partners/openai/tests/unit_tests/chat_models/test_batch.py @@ -48,7 +48,7 @@ class TestOpenAIBatchClient: ] batch_id = self.batch_client.create_batch( - batch_requests=batch_requests, + requests=batch_requests, description="Test batch", metadata={"test": "true"}, ) @@ -73,7 +73,7 @@ class TestOpenAIBatchClient: ] with pytest.raises(BatchError, match="Failed to create batch"): - self.batch_client.create_batch(batch_requests=batch_requests) + self.batch_client.create_batch(requests=batch_requests) def test_poll_batch_status_completed(self) -> None: """Test polling until
batch completion.""" @@ -119,8 +119,7 @@ class TestOpenAIBatchClient: self.mock_client.batches.retrieve.return_value = mock_batch_in_progress # Set very short timeout - self.batch_client.timeout = 0.2 - + self.batch_client.timeout = 0.2 with pytest.raises(BatchError, match="Batch polling timed out"): self.batch_client.poll_batch_status("batch_123")