fix(openai): support phase parameter (#36161)

This commit is contained in:
ccurme
2026-03-22 14:23:24 -04:00
committed by GitHub
parent 64a848a03b
commit 900f8a3513
5 changed files with 133 additions and 18 deletions

View File

@@ -4278,6 +4278,7 @@ def _construct_responses_api_input(messages: Sequence[BaseMessage]) -> list:
# Aggregate content blocks for a single message
if block_type in ("text", "output_text", "refusal"):
msg_id = block.get("id")
phase = block.get("phase")
if block_type in ("text", "output_text"):
# Defensive check: block may not have "text" key
text = block.get("text")
@@ -4303,17 +4304,20 @@ def _construct_responses_api_input(messages: Sequence[BaseMessage]) -> list:
if "content" not in item:
item["content"] = []
item["content"].append(new_block)
if phase is not None:
item["phase"] = phase
break
else:
# If no block with this ID, create a new one
input_.append(
{
"type": "message",
"content": [new_block],
"role": "assistant",
"id": msg_id,
}
)
new_item: dict = {
"type": "message",
"content": [new_block],
"role": "assistant",
"id": msg_id,
}
if phase is not None:
new_item["phase"] = phase
input_.append(new_item)
elif block_type in (
"reasoning",
"compaction",
@@ -4467,6 +4471,7 @@ def _construct_lc_result_from_responses_api(
additional_kwargs: dict = {}
for output in response.output:
if output.type == "message":
phase = getattr(output, "phase", None)
for content in output.content:
if content.type == "output_text":
block = {
@@ -4480,13 +4485,20 @@ def _construct_lc_result_from_responses_api(
else [],
"id": output.id,
}
if phase is not None:
block["phase"] = phase
content_blocks.append(block)
if hasattr(content, "parsed"):
additional_kwargs["parsed"] = content.parsed
if content.type == "refusal":
content_blocks.append(
{"type": "refusal", "refusal": content.refusal, "id": output.id}
)
refusal_block = {
"type": "refusal",
"refusal": content.refusal,
"id": output.id,
}
if phase is not None:
refusal_block["phase"] = phase
content_blocks.append(refusal_block)
elif output.type == "function_call":
content_blocks.append(output.model_dump(exclude_none=True, mode="json"))
try:
@@ -4707,6 +4719,16 @@ def _convert_responses_chunk_to_generation_chunk(
elif chunk.type == "response.output_item.added" and chunk.item.type == "message":
if output_version == "v0":
id = chunk.item.id
elif phase := getattr(chunk.item, "phase", None):
_advance(chunk.output_index, 0)
content.append(
{
"type": "text",
"text": "",
"phase": phase,
"index": current_index,
}
)
else:
pass
elif (

View File

@@ -1347,6 +1347,98 @@ def test_csv_input() -> None:
)
@pytest.mark.default_cassette("test_phase.yaml.gz")
@pytest.mark.vcr
@pytest.mark.parametrize("output_version", ["responses/v1", "v1"])
def test_phase(output_version: str) -> None:
    """Verify `phase` metadata on text blocks in a non-streaming agent run.

    The intermediate (tool-calling) turn should be tagged ``commentary`` and
    the closing turn ``final_answer``, for both supported output versions.
    """

    def get_weather(location: str) -> str:
        """Get the weather at a location."""
        return "It's sunny."

    model = ChatOpenAI(
        model="gpt-5.4",
        use_responses_api=True,
        verbosity="high",
        reasoning={"effort": "medium", "summary": "auto"},
        output_version=output_version,
    )
    agent = create_agent(model, tools=[get_weather])
    user_message = {
        "role": "user",
        "content": (
            "What's the weather in the oldest major city in the US? State your answer "
            "and then generate a tool call this turn."
        ),
    }
    state = agent.invoke({"messages": [user_message]})

    def _first_text_block(message):
        # Extract the first content block of type "text" from a message.
        return next(b for b in message.content if b["type"] == "text")

    # Turn that emits the tool call alongside interim text -> commentary phase.
    assert _first_text_block(state["messages"][1])["phase"] == "commentary"
    # Last message closes the loop with the final answer phase.
    assert _first_text_block(state["messages"][-1])["phase"] == "final_answer"
@pytest.mark.default_cassette("test_phase_streaming.yaml.gz")
@pytest.mark.vcr
@pytest.mark.parametrize("output_version", ["responses/v1", "v1"])
def test_phase_streaming(output_version: str) -> None:
    """Verify `phase` metadata survives streaming aggregation in an agent run.

    Also checks the aggregated block-type layout, which differs between the
    ``responses/v1`` and ``v1`` output versions (``function_call`` vs
    ``tool_call``).
    """

    def get_weather(location: str) -> str:
        """Get the weather at a location."""
        return "It's sunny."

    model = ChatOpenAI(
        model="gpt-5.4",
        use_responses_api=True,
        verbosity="high",
        reasoning={"effort": "medium", "summary": "auto"},
        streaming=True,
        output_version=output_version,
    )
    agent = create_agent(model, tools=[get_weather])
    user_message = {
        "role": "user",
        "content": (
            "What's the weather in the oldest major city in the US? State your answer "
            "and then generate a tool call this turn."
        ),
    }
    state = agent.invoke({"messages": [user_message]})

    first_response = state["messages"][1]
    observed_types = [b["type"] for b in first_response.content]
    # Tool-call block naming differs per output version.
    if output_version == "responses/v1":
        assert observed_types == ["reasoning", "text", "function_call"]
    else:
        assert observed_types == ["reasoning", "text", "tool_call"]

    def _first_text_block(message):
        # Extract the first content block of type "text" from a message.
        return next(b for b in message.content if b["type"] == "text")

    # Interim turn text is tagged as commentary.
    assert _first_text_block(first_response)["phase"] == "commentary"

    final_response = state["messages"][-1]
    assert [b["type"] for b in final_response.content] == ["text"]
    # Closing turn carries the final-answer phase.
    assert _first_text_block(final_response)["phase"] == "final_answer"
@pytest.mark.default_cassette("test_tool_search.yaml.gz")
@pytest.mark.vcr
@pytest.mark.parametrize("output_version", ["responses/v1", "v1"])

View File

@@ -1,5 +1,5 @@
version = 1
revision = 3
revision = 2
requires-python = ">=3.10.0, <4.0.0"
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy'",
@@ -547,7 +547,7 @@ wheels = [
[[package]]
name = "langchain"
version = "1.2.12"
version = "1.2.13"
source = { editable = "../../langchain_v1" }
dependencies = [
{ name = "langchain-core" },
@@ -560,6 +560,7 @@ requires-dist = [
{ name = "langchain-anthropic", marker = "extra == 'anthropic'", editable = "../anthropic" },
{ name = "langchain-aws", marker = "extra == 'aws'" },
{ name = "langchain-azure-ai", marker = "extra == 'azure-ai'" },
{ name = "langchain-baseten", marker = "extra == 'baseten'", specifier = ">=0.2.0" },
{ name = "langchain-community", marker = "extra == 'community'" },
{ name = "langchain-core", editable = "../../core" },
{ name = "langchain-deepseek", marker = "extra == 'deepseek'" },
@@ -577,7 +578,7 @@ requires-dist = [
{ name = "langgraph", specifier = ">=1.1.1,<1.2.0" },
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
]
provides-extras = ["community", "anthropic", "openai", "azure-ai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "deepseek", "xai", "perplexity"]
provides-extras = ["community", "anthropic", "openai", "azure-ai", "google-vertexai", "google-genai", "fireworks", "ollama", "together", "mistralai", "huggingface", "groq", "aws", "baseten", "deepseek", "xai", "perplexity"]
[package.metadata.requires-dev]
lint = [{ name = "ruff", specifier = ">=0.15.0,<0.16.0" }]
@@ -610,7 +611,7 @@ typing = [
[[package]]
name = "langchain-core"
version = "1.2.19"
version = "1.2.20"
source = { editable = "../../core" }
dependencies = [
{ name = "jsonpatch" },
@@ -1105,7 +1106,7 @@ wheels = [
[[package]]
name = "openai"
version = "2.26.0"
version = "2.29.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -1117,9 +1118,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d7/91/2a06c4e9597c338cac1e5e5a8dd6f29e1836fc229c4c523529dca387fda8/openai-2.26.0.tar.gz", hash = "sha256:b41f37c140ae0034a6e92b0c509376d907f3a66109935fba2c1b471a7c05a8fb", size = 666702, upload-time = "2026-03-05T23:17:35.874Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b4/15/203d537e58986b5673e7f232453a2a2f110f22757b15921cbdeea392e520/openai-2.29.0.tar.gz", hash = "sha256:32d09eb2f661b38d3edd7d7e1a2943d1633f572596febe64c0cd370c86d52bec", size = 671128, upload-time = "2026-03-17T17:53:49.599Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/2e/3f73e8ca53718952222cacd0cf7eecc9db439d020f0c1fe7ae717e4e199a/openai-2.26.0-py3-none-any.whl", hash = "sha256:6151bf8f83802f036117f06cc8a57b3a4da60da9926826cc96747888b57f394f", size = 1136409, upload-time = "2026-03-05T23:17:34.072Z" },
{ url = "https://files.pythonhosted.org/packages/d0/b1/35b6f9c8cf9318e3dbb7146cc82dab4cf61182a8d5406fc9b50864362895/openai-2.29.0-py3-none-any.whl", hash = "sha256:b7c5de513c3286d17c5e29b92c4c98ceaf0d775244ac8159aeb1bddf840eb42a", size = 1141533, upload-time = "2026-03-17T17:53:47.348Z" },
]
[[package]]