fix(openai): fix test on standard outputs branch (#32329)

Author: ccurme
Date: 2025-07-30 14:33:42 -03:00 (committed by GitHub)
Parent: a0abb79f6d
Commit: 3afcd6132e

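The change below de-parametrizes `test_responses_stream`: the `responses/v1` case stays in that test with the output version pinned, and the `v1` case moves into a new `test_responses_stream_v1` built on `ChatOpenAIV1`. A minimal sketch of the two constructions exercised, assuming only the arguments visible in the diff:

    from langchain_openai import ChatOpenAI, ChatOpenAIV1

    # Responses API with explicit "responses/v1" content blocks
    llm = ChatOpenAI(
        model="o4-mini", use_responses_api=True, output_version="responses/v1"
    )

    # v1 model: streams AIMessageChunkV1; no output_version argument is passed
    llm_v1 = ChatOpenAIV1(model="o4-mini", use_responses_api=True)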

@@ -1,8 +1,8 @@
-from typing import Any, Literal, Optional
+from typing import Any, Optional
 from unittest.mock import MagicMock, patch

 import pytest
 from langchain_core.messages import AIMessageChunk, BaseMessageChunk
+from langchain_core.messages.v1 import AIMessageChunk as AIMessageChunkV1
 from openai.types.responses import (
     ResponseCompletedEvent,
     ResponseContentPartAddedEvent,
@@ -37,7 +37,7 @@ from openai.types.responses.response_usage import (
 from openai.types.shared.reasoning import Reasoning
 from openai.types.shared.response_format_text import ResponseFormatText

-from langchain_openai import ChatOpenAI
+from langchain_openai import ChatOpenAI, ChatOpenAIV1
 from tests.unit_tests.chat_models.test_base import MockSyncContextManager

 responses_stream = [
@@ -621,98 +621,9 @@ def _strip_none(obj: Any) -> Any:
     return obj


-@pytest.mark.parametrize(
-    "output_version, expected_content",
-    [
-        (
-            "responses/v1",
-            [
-                {
-                    "id": "rs_123",
-                    "summary": [
-                        {
-                            "index": 0,
-                            "type": "summary_text",
-                            "text": "reasoning block one",
-                        },
-                        {
-                            "index": 1,
-                            "type": "summary_text",
-                            "text": "another reasoning block",
-                        },
-                    ],
-                    "type": "reasoning",
-                    "index": 0,
-                },
-                {"type": "text", "text": "text block one", "index": 1, "id": "msg_123"},
-                {
-                    "type": "text",
-                    "text": "another text block",
-                    "index": 2,
-                    "id": "msg_123",
-                },
-                {
-                    "id": "rs_234",
-                    "summary": [
-                        {"index": 0, "type": "summary_text", "text": "more reasoning"},
-                        {
-                            "index": 1,
-                            "type": "summary_text",
-                            "text": "still more reasoning",
-                        },
-                    ],
-                    "type": "reasoning",
-                    "index": 3,
-                },
-                {"type": "text", "text": "more", "index": 4, "id": "msg_234"},
-                {"type": "text", "text": "text", "index": 5, "id": "msg_234"},
-            ],
-        ),
-        (
-            "v1",
-            [
-                {
-                    "type": "reasoning",
-                    "reasoning": "reasoning block one",
-                    "id": "rs_123",
-                    "index": 0,
-                },
-                {
-                    "type": "reasoning",
-                    "reasoning": "another reasoning block",
-                    "id": "rs_123",
-                    "index": 1,
-                },
-                {"type": "text", "text": "text block one", "index": 2, "id": "msg_123"},
-                {
-                    "type": "text",
-                    "text": "another text block",
-                    "index": 3,
-                    "id": "msg_123",
-                },
-                {
-                    "type": "reasoning",
-                    "reasoning": "more reasoning",
-                    "id": "rs_234",
-                    "index": 4,
-                },
-                {
-                    "type": "reasoning",
-                    "reasoning": "still more reasoning",
-                    "id": "rs_234",
-                    "index": 5,
-                },
-                {"type": "text", "text": "more", "index": 6, "id": "msg_234"},
-                {"type": "text", "text": "text", "index": 7, "id": "msg_234"},
-            ],
-        ),
-    ],
-)
-def test_responses_stream(
-    output_version: Literal["v0", "responses/v1"], expected_content: list[dict]
-) -> None:
+def test_responses_stream() -> None:
     llm = ChatOpenAI(
-        model="o4-mini", use_responses_api=True, output_version=output_version
+        model="o4-mini", use_responses_api=True, output_version="responses/v1"
     )
     mock_client = MagicMock()
@@ -728,8 +639,32 @@ def test_responses_stream(
         assert isinstance(chunk, AIMessageChunk)
         full = chunk if full is None else full + chunk
         chunks.append(chunk)

     assert isinstance(full, AIMessageChunk)
+    expected_content = [
+        {
+            "id": "rs_123",
+            "summary": [
+                {"index": 0, "type": "summary_text", "text": "reasoning block one"},
+                {"index": 1, "type": "summary_text", "text": "another reasoning block"},
+            ],
+            "type": "reasoning",
+            "index": 0,
+        },
+        {"type": "text", "text": "text block one", "index": 1, "id": "msg_123"},
+        {"type": "text", "text": "another text block", "index": 2, "id": "msg_123"},
+        {
+            "id": "rs_234",
+            "summary": [
+                {"index": 0, "type": "summary_text", "text": "more reasoning"},
+                {"index": 1, "type": "summary_text", "text": "still more reasoning"},
+            ],
+            "type": "reasoning",
+            "index": 3,
+        },
+        {"type": "text", "text": "more", "index": 4, "id": "msg_234"},
+        {"type": "text", "text": "text", "index": 5, "id": "msg_234"},
+    ]
     assert full.content == expected_content
     assert full.additional_kwargs == {}
     assert full.id == "resp_123"
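Note how the two tests expect different shapes for the same stream: under `responses/v1` (above), each reasoning item is one content block whose parts sit in a nested `summary` list, while under `v1` (below), every summary part becomes its own top-level `reasoning` block, shifting the indexes of all later blocks. For the first reasoning item, with values taken directly from the expected outputs:

    # responses/v1: one block at index 0, parts nested under "summary"
    {"type": "reasoning", "id": "rs_123", "index": 0, "summary": [...]}

    # v1: two flat blocks at indexes 0 and 1, pushing "text block one" to index 2
    {"type": "reasoning", "reasoning": "reasoning block one", "id": "rs_123", "index": 0}
    {"type": "reasoning", "reasoning": "another reasoning block", "id": "rs_123", "index": 1}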
@@ -749,3 +684,71 @@ def test_responses_stream(
         dumped = _strip_none(item.model_dump())
         _ = dumped.pop("status", None)
         assert dumped == payload["input"][idx]
+
+
+def test_responses_stream_v1() -> None:
+    llm = ChatOpenAIV1(model="o4-mini", use_responses_api=True)
+    mock_client = MagicMock()
+
+    def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:
+        return MockSyncContextManager(responses_stream)
+
+    mock_client.responses.create = mock_create
+
+    full: Optional[AIMessageChunkV1] = None
+    chunks = []
+    with patch.object(llm, "root_client", mock_client):
+        for chunk in llm.stream("test"):
+            assert isinstance(chunk, AIMessageChunkV1)
+            full = chunk if full is None else full + chunk
+            chunks.append(chunk)
+
+    assert isinstance(full, AIMessageChunkV1)
+    expected_content = [
+        {
+            "type": "reasoning",
+            "reasoning": "reasoning block one",
+            "id": "rs_123",
+            "index": 0,
+        },
+        {
+            "type": "reasoning",
+            "reasoning": "another reasoning block",
+            "id": "rs_123",
+            "index": 1,
+        },
+        {"type": "text", "text": "text block one", "index": 2, "id": "msg_123"},
+        {"type": "text", "text": "another text block", "index": 3, "id": "msg_123"},
+        {
+            "type": "reasoning",
+            "reasoning": "more reasoning",
+            "id": "rs_234",
+            "index": 4,
+        },
+        {
+            "type": "reasoning",
+            "reasoning": "still more reasoning",
+            "id": "rs_234",
+            "index": 5,
+        },
+        {"type": "text", "text": "more", "index": 6, "id": "msg_234"},
+        {"type": "text", "text": "text", "index": 7, "id": "msg_234"},
+    ]
+    assert full.content == expected_content
+    assert full.id == "resp_123"
+
+    # Test reconstruction
+    payload = llm._get_request_payload([full])
+    completed = [
+        item
+        for item in responses_stream
+        if item.type == "response.completed"  # type: ignore[attr-defined]
+    ]
+    assert len(completed) == 1
+    response = completed[0].response  # type: ignore[attr-defined]
+    assert len(response.output) == len(payload["input"])
+    for idx, item in enumerate(response.output):
+        dumped = _strip_none(item.model_dump())
+        _ = dumped.pop("status", None)
+        assert dumped == payload["input"][idx]
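`MockSyncContextManager` is imported from tests.unit_tests.chat_models.test_base rather than defined in this file. A hypothetical minimal implementation, assuming all it must do is replay the pre-recorded `responses_stream` events through a sync context manager, might look like:

    from collections.abc import Iterator
    from typing import Any


    class MockSyncContextManager:
        """Hypothetical sketch: replays canned stream events in order."""

        def __init__(self, events: list) -> None:
            self.events = events

        def __enter__(self) -> Iterator[Any]:
            # Hand back an iterator so `for event in stream:` works as-is.
            return iter(self.events)

        def __exit__(self, *exc_info: object) -> bool:
            # Returning False propagates any exception from the `with` block.
            return False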