diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index 64bb56a5a2e..c37625134d7 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -3191,7 +3191,7 @@ def _construct_responses_api_input(messages: Sequence[BaseMessage]) -> list:
             input_.append(msg)
             input_.extend(function_calls)
             input_.extend(computer_calls)
-        elif msg["role"] == "user":
+        elif msg["role"] in ("user", "system", "developer"):
             if isinstance(msg["content"], list):
                 new_blocks = []
                 for block in msg["content"]:
diff --git a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
index e114ec06879..c762b3dab5d 100644
--- a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
+++ b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
@@ -956,13 +956,17 @@ def test__get_request_payload() -> None:
     messages: list = [
         SystemMessage("hello"),
         SystemMessage("bye", additional_kwargs={"__openai_role__": "developer"}),
+        SystemMessage(content=[{"type": "text", "text": "hello!"}]),
         {"role": "human", "content": "how are you"},
+        {"role": "user", "content": [{"type": "text", "text": "feeling today"}]},
     ]
     expected = {
         "messages": [
             {"role": "system", "content": "hello"},
             {"role": "developer", "content": "bye"},
+            {"role": "system", "content": [{"type": "text", "text": "hello!"}]},
             {"role": "user", "content": "how are you"},
+            {"role": "user", "content": [{"type": "text", "text": "feeling today"}]},
         ],
         "model": "gpt-4o-2024-08-06",
         "stream": False,
@@ -977,7 +981,9 @@ def test__get_request_payload() -> None:
         "messages": [
             {"role": "developer", "content": "hello"},
             {"role": "developer", "content": "bye"},
+            {"role": "developer", "content": [{"type": "text", "text": "hello!"}]},
             {"role": "user", "content": "how are you"},
+            {"role": "user", "content": [{"type": "text", "text": "feeling today"}]},
         ],
         "model": "o3-mini",
         "stream": False,
@@ -1658,6 +1664,9 @@ def test__construct_responses_api_input_multiple_message_types() -> None:
     """Test conversion of a conversation with multiple message types."""
     messages = [
         SystemMessage(content="You are a helpful assistant."),
+        SystemMessage(
+            content=[{"type": "text", "text": "You are a very helpful assistant!"}]
+        ),
         HumanMessage(content="What's the weather in San Francisco?"),
         HumanMessage(
             content=[{"type": "text", "text": "What's the weather in San Francisco?"}]
@@ -1698,31 +1707,36 @@ def test__construct_responses_api_input_multiple_message_types() -> None:
     assert result[0]["role"] == "system"
     assert result[0]["content"] == "You are a helpful assistant."
 
+    assert result[1]["role"] == "system"
+    assert result[1]["content"] == [
+        {"type": "input_text", "text": "You are a very helpful assistant!"}
+    ]
+
     # Check human message
-    assert result[1]["role"] == "user"
-    assert result[1]["content"] == "What's the weather in San Francisco?"
     assert result[2]["role"] == "user"
-    assert result[2]["content"] == [
+    assert result[2]["content"] == "What's the weather in San Francisco?"
+    assert result[3]["role"] == "user"
+    assert result[3]["content"] == [
         {"type": "input_text", "text": "What's the weather in San Francisco?"}
     ]
 
     # Check function call
-    assert result[3]["type"] == "function_call"
-    assert result[3]["name"] == "get_weather"
-    assert result[3]["arguments"] == '{"location": "San Francisco"}'
-    assert result[3]["call_id"] == "call_123"
-    assert result[3]["id"] == "func_456"
+    assert result[4]["type"] == "function_call"
+    assert result[4]["name"] == "get_weather"
+    assert result[4]["arguments"] == '{"location": "San Francisco"}'
+    assert result[4]["call_id"] == "call_123"
+    assert result[4]["id"] == "func_456"
 
     # Check function call output
-    assert result[4]["type"] == "function_call_output"
-    assert result[4]["output"] == '{"temperature": 72, "conditions": "sunny"}'
-    assert result[4]["call_id"] == "call_123"
-
-    assert result[5]["role"] == "assistant"
-    assert result[5]["content"] == "The weather in San Francisco is 72°F and sunny."
+    assert result[5]["type"] == "function_call_output"
+    assert result[5]["output"] == '{"temperature": 72, "conditions": "sunny"}'
+    assert result[5]["call_id"] == "call_123"
 
     assert result[6]["role"] == "assistant"
-    assert result[6]["content"] == [
+    assert result[6]["content"] == "The weather in San Francisco is 72°F and sunny."
+
+    assert result[7]["role"] == "assistant"
+    assert result[7]["content"] == [
         {
             "type": "output_text",
             "text": "The weather in San Francisco is 72°F and sunny.",
@@ -1733,6 +1747,25 @@ def test__construct_responses_api_input_multiple_message_types() -> None:
     # assert no mutation has occurred
    assert messages_copy == messages
 
+    # Test dict messages
+    llm = ChatOpenAI(model="o4-mini", use_responses_api=True)
+    message_dicts: list = [
+        {"role": "developer", "content": "This is a developer message."},
+        {
+            "role": "developer",
+            "content": [{"type": "text", "text": "This is a developer message!"}],
+        },
+    ]
+    payload = llm._get_request_payload(message_dicts)
+    result = payload["input"]
+    assert len(result) == 2
+    assert result[0]["role"] == "developer"
+    assert result[0]["content"] == "This is a developer message."
+    assert result[1]["role"] == "developer"
+    assert result[1]["content"] == [
+        {"type": "input_text", "text": "This is a developer message!"}
+    ]
+
 
 def test_service_tier() -> None:
     llm = ChatOpenAI(model="o4-mini", service_tier="flex")
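
For context, a minimal usage sketch of the behavior this patch enables. It is illustrative only and mirrors the new unit tests: before the change, only user messages hit the block-conversion branch in `_construct_responses_api_input`; system and developer messages whose content is a list of blocks now get the same `"text"` → `"input_text"` mapping. `_get_request_payload` is the same private helper the tests exercise.

```python
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="o4-mini", use_responses_api=True)

messages = [
    # System content given as a list of blocks is now converted for the
    # Responses API instead of being passed through with chat-completions
    # style "text" blocks.
    SystemMessage(content=[{"type": "text", "text": "You are a helpful assistant."}]),
    HumanMessage(content=[{"type": "text", "text": "What's the weather in SF?"}]),
]

payload = llm._get_request_payload(messages)
assert payload["input"][0]["role"] == "system"
assert payload["input"][0]["content"] == [
    {"type": "input_text", "text": "You are a helpful assistant."}
]
```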