mirror of
https://github.com/hwchase17/langchain.git
synced 2026-03-18 11:07:36 +00:00
feat(openai): support tool search and defer_loading for openai responses api
This commit is contained in:
@@ -508,6 +508,8 @@ _WellKnownOpenAITools = (
|
||||
"image_generation",
|
||||
"web_search_preview",
|
||||
"web_search",
|
||||
"tool_search",
|
||||
"namespace",
|
||||
)
|
||||
|
||||
|
||||
@@ -570,7 +572,16 @@ def convert_to_openai_tool(
|
||||
oai_tool["format"] = tool.metadata["format"]
|
||||
return oai_tool
|
||||
oai_function = convert_to_openai_function(tool, strict=strict)
|
||||
return {"type": "function", "function": oai_function}
|
||||
result: dict[str, Any] = {"type": "function", "function": oai_function}
|
||||
if (
|
||||
isinstance(tool, langchain_core.tools.base.BaseTool)
|
||||
and hasattr(tool, "extras")
|
||||
and isinstance(tool.extras, dict)
|
||||
):
|
||||
for key in ("defer_loading",):
|
||||
if key in tool.extras:
|
||||
result[key] = tool.extras[key]
|
||||
return result
|
||||
|
||||
|
||||
def convert_to_json_schema(
|
||||
|
||||
@@ -166,6 +166,7 @@ WellKnownTools = (
|
||||
"code_interpreter",
|
||||
"mcp",
|
||||
"image_generation",
|
||||
"tool_search",
|
||||
)
|
||||
|
||||
|
||||
@@ -3981,7 +3982,10 @@ def _construct_responses_api_payload(
|
||||
# chat api: {"type": "function", "function": {"name": "...", "description": "...", "parameters": {...}, "strict": ...}} # noqa: E501
|
||||
# responses api: {"type": "function", "name": "...", "description": "...", "parameters": {...}, "strict": ...} # noqa: E501
|
||||
if tool["type"] == "function" and "function" in tool:
|
||||
new_tools.append({"type": "function", **tool["function"]})
|
||||
new_tool = {"type": "function", **tool["function"]}
|
||||
if tool.get("defer_loading"):
|
||||
new_tool["defer_loading"] = tool["defer_loading"]
|
||||
new_tools.append(new_tool)
|
||||
else:
|
||||
if tool["type"] == "image_generation":
|
||||
# Handle partial images (not yet supported)
|
||||
|
||||
@@ -1267,3 +1267,120 @@ def test_csv_input() -> None:
|
||||
"3" in str(response2.content).lower()
|
||||
or "three" in str(response2.content).lower()
|
||||
)
|
||||
|
||||
|
||||
def test_tool_search() -> None:
    """Exercise the ``tool_search`` built-in alongside tools deferred via extras."""
    from langchain_core.tools import tool

    @tool(extras={"defer_loading": True})
    def list_open_orders(customer_id: str) -> str:
        """List open orders for a customer ID."""
        return f"Orders for {customer_id}: [order_1, order_2]"

    @tool(extras={"defer_loading": True})
    def get_customer_profile(customer_id: str) -> str:
        """Fetch a customer profile by customer ID."""
        return f"Profile for {customer_id}"

    def find(entries, key, value):
        # First payload entry whose `key` equals `value`.
        return next(entry for entry in entries if entry.get(key) == value)

    model = ChatOpenAI(model="gpt-4o", use_responses_api=True)
    model_with_tools = model.bind_tools(
        [list_open_orders, get_customer_profile, {"type": "tool_search"}],
    )

    request = model._get_request_payload(
        [HumanMessage("List open orders for customer CUST-12345.")],
        **model_with_tools.kwargs,  # type: ignore[attr-defined]
    )
    sent_tools = request["tools"]

    # Both @tool-decorated functions should carry the extras flag through.
    orders_entry = find(sent_tools, "name", "list_open_orders")
    assert orders_entry["defer_loading"] is True
    assert orders_entry["type"] == "function"
    profile_entry = find(sent_tools, "name", "get_customer_profile")
    assert profile_entry["defer_loading"] is True
    # The built-in tool_search entry passes through untouched.
    assert find(sent_tools, "type", "tool_search") == {"type": "tool_search"}

    result = model_with_tools.invoke("List open orders for customer CUST-12345.")
    assert isinstance(result, AIMessage)
    assert result.tool_calls
|
||||
|
||||
|
||||
def test_tool_search_dict_tools() -> None:
    """Exercise ``tool_search`` when tools are supplied as raw Responses-API dicts."""
    # A pre-built Responses-API function spec with defer_loading set inline.
    orders_spec = {
        "type": "function",
        "name": "list_open_orders",
        "description": "List open orders for a customer ID.",
        "defer_loading": True,
        "parameters": {
            "type": "object",
            "properties": {"customer_id": {"type": "string"}},
            "required": ["customer_id"],
            "additionalProperties": False,
        },
    }
    model = ChatOpenAI(model="gpt-4o", use_responses_api=True)
    model_with_tools = model.bind_tools([orders_spec, {"type": "tool_search"}])

    request = model._get_request_payload(
        [HumanMessage("List open orders for customer CUST-12345.")],
        **model_with_tools.kwargs,  # type: ignore[attr-defined]
    )
    sent_tools = request["tools"]

    # The inline defer_loading flag must survive payload construction.
    orders_entry = next(t for t in sent_tools if t.get("name") == "list_open_orders")
    assert orders_entry["defer_loading"] is True
    assert next(t for t in sent_tools if t.get("type") == "tool_search") == {
        "type": "tool_search"
    }

    result = model_with_tools.invoke("List open orders for customer CUST-12345.")
    assert isinstance(result, AIMessage)
    assert result.tool_calls
|
||||
|
||||
|
||||
def test_tool_search_with_namespace() -> None:
    """Exercise ``tool_search`` combined with a ``namespace`` tool grouping."""
    # Function spec nested inside a namespace, flagged for deferred loading.
    deferred_orders_tool = {
        "type": "function",
        "name": "list_open_orders",
        "description": "List open orders for a customer ID.",
        "defer_loading": True,
        "parameters": {
            "type": "object",
            "properties": {"customer_id": {"type": "string"}},
            "required": ["customer_id"],
            "additionalProperties": False,
        },
    }
    crm_namespace = {
        "type": "namespace",
        "name": "crm",
        "description": "CRM tools for customer lookup and order management.",
        "tools": [deferred_orders_tool],
    }
    model = ChatOpenAI(model="gpt-4o", use_responses_api=True)
    model_with_tools = model.bind_tools([crm_namespace, {"type": "tool_search"}])

    request = model._get_request_payload(
        [HumanMessage("List open orders for customer CUST-12345.")],
        **model_with_tools.kwargs,  # type: ignore[attr-defined]
    )
    sent_tools = request["tools"]

    # The namespace wrapper and its nested defer_loading flag pass through intact.
    namespace_entry = next(t for t in sent_tools if t.get("type") == "namespace")
    assert namespace_entry["name"] == "crm"
    assert namespace_entry["tools"][0]["defer_loading"] is True
    assert next(t for t in sent_tools if t.get("type") == "tool_search") == {
        "type": "tool_search"
    }

    result = model_with_tools.invoke("List open orders for customer CUST-12345.")
    assert isinstance(result, AIMessage)
    assert result.tool_calls
|
||||
|
||||
@@ -3474,3 +3474,33 @@ def test_context_overflow_error_backwards_compatibility() -> None:
|
||||
# Verify it's both types (multiple inheritance)
|
||||
assert isinstance(exc_info.value, openai.BadRequestError)
|
||||
assert isinstance(exc_info.value, ContextOverflowError)
|
||||
|
||||
|
||||
def test_tool_search_payload() -> None:
    """Only tools flagged via extras carry ``defer_loading`` into the payload."""
    from langchain_core.tools import tool

    @tool(extras={"defer_loading": True})
    def get_weather(city: str) -> str:
        """Get the weather for a city."""
        return "sunny"

    @tool
    def get_time(city: str) -> str:
        """Get the time for a city."""
        return "12:00"

    # Unit test: fake key, payload is built locally without hitting the API.
    model = ChatOpenAI(
        model="gpt-4o", api_key=SecretStr("test"), use_responses_api=True
    )
    model_with_tools = model.bind_tools(
        [get_weather, get_time, {"type": "tool_search"}],
    )
    request = model._get_request_payload(
        [HumanMessage("What's the weather in NYC?")],
        **model_with_tools.kwargs,  # type: ignore[attr-defined]
    )
    sent_tools = request["tools"]

    weather_entry = next(t for t in sent_tools if t.get("name") == "get_weather")
    assert weather_entry["defer_loading"] is True
    # get_time has no extras, so the flag must be absent, not False.
    time_entry = next(t for t in sent_tools if t.get("name") == "get_time")
    assert "defer_loading" not in time_entry
    assert next(t for t in sent_tools if t.get("type") == "tool_search") == {
        "type": "tool_search"
    }
|
||||
|
||||
Reference in New Issue
Block a user