openai[patch]: pass through with_structured_output kwargs (#31518)

Support passing extra keyword args through `with_structured_output` to the model:
```python
from langchain.chat_models import init_chat_model
from pydantic import BaseModel


class ResponseSchema(BaseModel):
    response: str


def get_weather(location: str) -> str:
    """Get weather"""
    # Stub: only the signature and docstring matter for the generated tool schema.
    pass

llm = init_chat_model("openai:gpt-4o-mini")

# tool_choice and parallel_tool_calls are extra kwargs, now forwarded to the model
structured_llm = llm.with_structured_output(
    ResponseSchema,
    tools=[get_weather],
    strict=True,
    include_raw=True,
    tool_choice="required",
    parallel_tool_calls=False,
)

structured_llm.invoke("whats up?")
```
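Since the example sets `include_raw=True`, the chain returns a dict rather than a bare `ResponseSchema`. A minimal sketch of consuming that result, with key names taken from the `with_structured_output` docstring shown in the diff below:

```python
result = structured_llm.invoke("whats up?")

ai_message = result["raw"]        # the full AIMessage, including any tool calls
parsed = result["parsed"]         # a ResponseSchema instance, or None
error = result["parsing_error"]   # exception raised during parsing, if any

if parsed is not None:
    print(parsed.response)
else:
    # e.g. the model chose the forwarded get_weather tool instead of answering
    print(ai_message.tool_calls)
```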
Commit: 761f8c3231 (parent 0375848f6c)
Author: Bagatur
Date: 2025-06-06 11:17:34 -04:00 (committed by GitHub)
2 changed files with 50 additions and 24 deletions


```diff
@@ -854,7 +854,7 @@ class AzureChatOpenAI(BaseChatOpenAI):
                      "parsed": None,
                  }
-            kwargs: Additional keyword args aren't supported.
+            kwargs: Additional keyword args are passed through to the model.
         Returns:
             A Runnable that takes same inputs as a :class:`langchain_core.language_models.chat.BaseChatModel`.
@@ -880,6 +880,12 @@ class AzureChatOpenAI(BaseChatOpenAI):
             ``method`` default changed from "function_calling" to "json_schema".
         .. versionchanged:: 0.3.12
             Support for ``tools`` added.
+        .. versionchanged:: 0.3.21
+            Pass ``kwargs`` through to the model.
         .. dropdown:: Example: schema=Pydantic class, method="json_schema", include_raw=False, strict=True
             Note, OpenAI has a number of restrictions on what types of schemas can be
```
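The `AzureChatOpenAI` hunks are docstring-only; the behaviour itself comes from `BaseChatOpenAI` below. A minimal sketch of the same pass-through on Azure, where the deployment name, API version, and endpoint are placeholders rather than values from the commit:

```python
from langchain_openai import AzureChatOpenAI
from pydantic import BaseModel


class Answer(BaseModel):
    text: str


# Placeholder Azure settings; substitute your own deployment and endpoint.
llm = AzureChatOpenAI(
    azure_deployment="my-gpt-4o-mini",
    api_version="2024-08-01-preview",
    azure_endpoint="https://example.openai.azure.com/",
)

# tool_choice is an extra kwarg; before this change it raised
# "Received unsupported arguments ...", now it is bound onto the model.
structured_llm = llm.with_structured_output(
    Answer,
    tools=[get_weather],  # the stub tool from the commit-message example
    tool_choice="auto",
    strict=True,
)
```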


```diff
@@ -1631,7 +1631,7 @@ class BaseChatOpenAI(BaseChatModel):
                      "parsed": None,
                  }
-            kwargs: Additional keyword args aren't supported.
+            kwargs: Additional keyword args are passed through to the model.
         Returns:
             A Runnable that takes same inputs as a :class:`langchain_core.language_models.chat.BaseChatModel`.
@@ -1655,9 +1655,10 @@ class BaseChatOpenAI(BaseChatModel):
         .. versionchanged:: 0.3.12
             Support for ``tools`` added.
+        .. versionchanged:: 0.3.21
+            Pass ``kwargs`` through to the model.
         """  # noqa: E501
-        if kwargs:
-            raise ValueError(f"Received unsupported arguments {kwargs}")
         if strict is not None and method == "json_mode":
             raise ValueError(
                 "Argument `strict` is not supported with `method`='json_mode'"
```
```diff
@@ -1700,13 +1701,18 @@ class BaseChatOpenAI(BaseChatModel):
                 )
             tool_name = convert_to_openai_tool(schema)["function"]["name"]
             bind_kwargs = self._filter_disabled_params(
-                tool_choice=tool_name,
-                parallel_tool_calls=False,
-                strict=strict,
-                ls_structured_output_format={
-                    "kwargs": {"method": method, "strict": strict},
-                    "schema": schema,
-                },
+                **{
+                    **dict(
+                        tool_choice=tool_name,
+                        parallel_tool_calls=False,
+                        strict=strict,
+                        ls_structured_output_format={
+                            "kwargs": {"method": method, "strict": strict},
+                            "schema": schema,
+                        },
+                    ),
+                    **kwargs,
+                }
             )
             llm = self.bind_tools([schema], **bind_kwargs)
```
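The `**{**dict(...), **kwargs}` construction introduced here is a plain dict merge: later entries win, so caller-supplied kwargs override the defaults computed for the `function_calling` path. A standalone sketch of just the merge semantics; `merge_bind_kwargs` is an illustrative name, not the library's, and the tracing-related `ls_structured_output_format` default is omitted:

```python
from typing import Any, Optional


def merge_bind_kwargs(tool_name: str, strict: Optional[bool], **kwargs: Any) -> dict:
    """Stand-in for the merge above: entries from **kwargs replace the defaults."""
    return {
        **dict(
            tool_choice=tool_name,
            parallel_tool_calls=False,
            strict=strict,
        ),
        **kwargs,
    }


print(merge_bind_kwargs("ResponseSchema", True))
# {'tool_choice': 'ResponseSchema', 'parallel_tool_calls': False, 'strict': True}

print(merge_bind_kwargs("ResponseSchema", True, tool_choice="required", parallel_tool_calls=True))
# {'tool_choice': 'required', 'parallel_tool_calls': True, 'strict': True}
```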
```diff
@@ -1721,11 +1727,16 @@ class BaseChatOpenAI(BaseChatModel):
             )
         elif method == "json_mode":
             llm = self.bind(
-                response_format={"type": "json_object"},
-                ls_structured_output_format={
-                    "kwargs": {"method": method},
-                    "schema": schema,
-                },
+                **{
+                    **dict(
+                        response_format={"type": "json_object"},
+                        ls_structured_output_format={
+                            "kwargs": {"method": method},
+                            "schema": schema,
+                        },
+                    ),
+                    **kwargs,
+                }
             )
             output_parser = (
                 PydanticOutputParser(pydantic_object=schema)  # type: ignore[arg-type]
```
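The `json_mode` branch gets the same merge around `response_format={"type": "json_object"}`. A hedged usage sketch, reusing `llm` and `ResponseSchema` from the commit-message example; `seed` is only an assumed example of an OpenAI chat-completions parameter one might forward:

```python
# seed is forwarded into the same bind() call as response_format.
structured_llm = llm.with_structured_output(
    ResponseSchema,
    method="json_mode",
    include_raw=True,
    seed=7,
)

# With json_mode the schema itself is not sent to the API, so the prompt must
# describe the JSON shape; the output parser only validates the reply.
result = structured_llm.invoke(
    "Answer as JSON with a single string key 'response'. whats up?"
)
```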
```diff
@@ -1739,13 +1750,16 @@ class BaseChatOpenAI(BaseChatModel):
                     "Received None."
                 )
             response_format = _convert_to_openai_response_format(schema, strict=strict)
-            bind_kwargs = dict(
-                response_format=response_format,
-                ls_structured_output_format={
-                    "kwargs": {"method": method, "strict": strict},
-                    "schema": convert_to_openai_tool(schema),
-                },
-            )
+            bind_kwargs = {
+                **dict(
+                    response_format=response_format,
+                    ls_structured_output_format={
+                        "kwargs": {"method": method, "strict": strict},
+                        "schema": convert_to_openai_tool(schema),
+                    },
+                    **kwargs,
+                )
+            }
             if tools:
                 bind_kwargs["tools"] = [
                     convert_to_openai_tool(t, strict=strict) for t in tools
```
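In the default `json_schema` path the forwarded kwargs land in the same `bind()` call as the `response_format` payload, and any `tools` are converted and appended afterwards; that is how `tool_choice="required"` in the commit-message example can point the model at `get_weather` instead of the schema. A rough, hand-assembled sketch of the resulting bound kwargs; the real payload comes from the private `_convert_to_openai_response_format` helper and may differ in detail:

```python
from langchain_core.utils.function_calling import convert_to_openai_tool

bound_kwargs = {
    "response_format": {
        "type": "json_schema",
        "json_schema": {
            "name": "ResponseSchema",
            "strict": True,
            "schema": {
                "type": "object",
                "properties": {"response": {"type": "string"}},
                "required": ["response"],
                "additionalProperties": False,
            },
        },
    },
    # forwarded through **kwargs:
    "tool_choice": "required",
    "parallel_tool_calls": False,
}
# because tools=[get_weather] was given, the `if tools:` branch then appends:
bound_kwargs["tools"] = [convert_to_openai_tool(get_weather, strict=True)]
```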
```diff
@@ -2597,7 +2611,7 @@ class ChatOpenAI(BaseChatOpenAI):  # type: ignore[override]
                      "parsed": None,
                  }
-            kwargs: Additional keyword args aren't supported.
+            kwargs: Additional keyword args are passed through to the model.
         Returns:
             A Runnable that takes same inputs as a :class:`langchain_core.language_models.chat.BaseChatModel`.
@@ -2623,6 +2637,12 @@ class ChatOpenAI(BaseChatOpenAI):  # type: ignore[override]
             ``method`` default changed from "function_calling" to "json_schema".
         .. versionchanged:: 0.3.12
             Support for ``tools`` added.
+        .. versionchanged:: 0.3.21
+            Pass ``kwargs`` through to the model.
         .. dropdown:: Example: schema=Pydantic class, method="json_schema", include_raw=False, strict=True
             Note, OpenAI has a number of restrictions on what types of schemas can be
```
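Per the `versionchanged:: 0.3.21` directives, the pass-through is version-gated; a small optional guard (naive version parse, shown only as a convenience) for code that depends on it:

```python
from importlib.metadata import version

# Earlier releases raise "Received unsupported arguments ..." for any extra kwarg.
installed = version("langchain-openai")
if tuple(int(p) for p in installed.split(".")[:3]) < (0, 3, 21):
    raise RuntimeError(
        f"langchain-openai {installed} lacks the with_structured_output kwargs pass-through"
    )
```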