diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index a8702359b36..b01a560e232 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -3599,6 +3599,13 @@ def _construct_responses_api_payload(
                 }
             else:
                 pass
+
+    verbosity = payload.pop("verbosity", None)
+    if verbosity is not None:
+        if "text" not in payload:
+            payload["text"] = {"format": {"type": "text"}}
+        payload["text"]["verbosity"] = verbosity
+
     return payload


diff --git a/libs/partners/openai/pyproject.toml b/libs/partners/openai/pyproject.toml
index 3bc90cdf3fe..858cc45ca48 100644
--- a/libs/partners/openai/pyproject.toml
+++ b/libs/partners/openai/pyproject.toml
@@ -8,7 +8,7 @@ license = { text = "MIT" }
 requires-python = ">=3.9"
 dependencies = [
     "langchain-core<1.0.0,>=0.3.74",
-    "openai<2.0.0,>=1.86.0",
+    "openai<2.0.0,>=1.99.5",
     "tiktoken<1,>=0.7",
 ]
 name = "langchain-openai"
diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py b/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py
index 32d3f199c85..4113b395f7a 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_responses_api.py
@@ -674,28 +674,17 @@ def test_image_generation_multi_turn() -> None:
     assert set(tool_output2.keys()).issubset(expected_keys)


-@pytest.mark.xfail(
-    reason="verbosity parameter not yet supported by OpenAI Responses API"
-)
 def test_verbosity_parameter() -> None:
     """Test verbosity parameter with Responses API.

-    TODO: This test is expected to fail until OpenAI enables verbosity support
-    in the Responses API for available models. The parameter is properly implemented
-    in the codebase but the API currently returns 'Unknown parameter: verbosity'.
-    Remove @pytest.mark.xfail when OpenAI adds support.
+    Tests that the verbosity parameter works correctly with the OpenAI Responses API.
+ """ - llm = ChatOpenAI( - model=MODEL_NAME, - verbosity="medium", - use_responses_api=True, - output_version="responses/v1", - ) + llm = ChatOpenAI(model=MODEL_NAME, verbosity="medium", use_responses_api=True) response = llm.invoke([HumanMessage(content="Hello, explain quantum computing.")]) assert isinstance(response, AIMessage) assert response.content - # When verbosity works, we expect the response to respect the verbosity level @pytest.mark.vcr() diff --git a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py index 73185790602..1d79f2a2d91 100644 --- a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py +++ b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py @@ -1200,7 +1200,8 @@ def test_verbosity_parameter_payload() -> None: messages = [{"role": "user", "content": "hello"}] payload = llm._get_request_payload(messages, stop=None) - assert payload["verbosity"] == "high" + assert payload["text"]["verbosity"] == "high" + assert payload["text"]["format"]["type"] == "text" def test_structured_output_old_model() -> None: diff --git a/libs/partners/openai/uv.lock b/libs/partners/openai/uv.lock index f626c1bacdc..7615c85c130 100644 --- a/libs/partners/openai/uv.lock +++ b/libs/partners/openai/uv.lock @@ -588,7 +588,7 @@ typing = [ [package.metadata] requires-dist = [ { name = "langchain-core", editable = "../../core" }, - { name = "openai", specifier = ">=1.86.0,<2.0.0" }, + { name = "openai", specifier = ">=1.99.5,<2.0.0" }, { name = "tiktoken", specifier = ">=0.7,<1" }, ] @@ -995,7 +995,7 @@ wheels = [ [[package]] name = "openai" -version = "1.99.3" +version = "1.99.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1007,9 +1007,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/d3/c372420c8ca1c60e785fd8c19e536cea8f16b0cfdcdad6458e1d8884f2ea/openai-1.99.3.tar.gz", hash = "sha256:1a0e2910e4545d828c14218f2ac3276827c94a043f5353e43b9413b38b497897", size = 504932, upload-time = "2025-08-07T20:35:15.893Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/4a/16b1b6ee8a62cbfb59057f97f6d9b7bb5ce529047d80bc0b406f65dfdc48/openai-1.99.5.tar.gz", hash = "sha256:aa97ac3326cac7949c5e4ac0274c454c1d19c939760107ae0d3948fc26a924ca", size = 505144, upload-time = "2025-08-08T16:44:46.865Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/bc/e52f49940b4e320629da7db09c90a2407a48c612cff397b4b41b7e58cdf9/openai-1.99.3-py3-none-any.whl", hash = "sha256:c786a03f6cddadb5ee42c6d749aa4f6134fe14fdd7d69a667e5e7ce7fd29a719", size = 785776, upload-time = "2025-08-07T20:35:13.653Z" }, + { url = "https://files.pythonhosted.org/packages/e6/f2/2472ae020f5156a994710bf926a76915c71bc7b5debf7b81a11506ec8414/openai-1.99.5-py3-none-any.whl", hash = "sha256:4e870f9501b7c36132e2be13313ce3c4d6915a837e7a299c483aab6a6d4412e9", size = 786246, upload-time = "2025-08-08T16:44:45.062Z" }, ] [[package]]