Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-07 22:11:51 +00:00)
openai[patch]: pass through with_structured_output kwargs (#31518)
Support

```python
from langchain.chat_models import init_chat_model
from pydantic import BaseModel


class ResponseSchema(BaseModel):
    response: str


def get_weather(location: str) -> str:
    """Get weather"""
    pass


llm = init_chat_model("openai:gpt-4o-mini")

structured_llm = llm.with_structured_output(
    ResponseSchema,
    tools=[get_weather],
    strict=True,
    include_raw=True,
    tool_choice="required",
    parallel_tool_calls=False,
)

structured_llm.invoke("whats up?")
```
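Because the example above sets `include_raw=True`, the runnable returns a dict containing the raw model message alongside the parsed output rather than the parsed object alone. Below is a minimal sketch of consuming that result, assuming the same `structured_llm` as in the commit message; the keys match the `include_raw` structure shown in the docstring excerpt further down ("raw", "parsed", "parsing_error").

```python
# Minimal sketch, assuming the `structured_llm` defined in the commit
# message above. With include_raw=True, with_structured_output returns a
# dict rather than the parsed object alone.
result = structured_llm.invoke("whats up?")

raw_message = result["raw"]        # the underlying AIMessage from the model
parsed = result["parsed"]          # ResponseSchema instance, or None if the
                                   # model answered with a tool call instead
error = result["parsing_error"]    # exception raised during parsing, if any

if parsed is not None:
    print(parsed.response)
else:
    # e.g. the model chose to call get_weather instead of returning the schema
    print(raw_message.tool_calls)
```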
```diff
@@ -854,7 +854,7 @@ class AzureChatOpenAI(BaseChatOpenAI):
                     "parsed": None,
                 }
 
-            kwargs: Additional keyword args aren't supported.
+            kwargs: Additional keyword args are passed through to the model.
 
         Returns:
             A Runnable that takes same inputs as a :class:`langchain_core.language_models.chat.BaseChatModel`.
@@ -880,6 +880,12 @@ class AzureChatOpenAI(BaseChatOpenAI):
 
             ``method`` default changed from "function_calling" to "json_schema".
 
         .. versionchanged:: 0.3.12
 
             Support for ``tools`` added.
 
+        .. versionchanged:: 0.3.21
+
+            Pass ``kwargs`` through to the model.
+
         .. dropdown:: Example: schema=Pydantic class, method="json_schema", include_raw=False, strict=True
 
             Note, OpenAI has a number of restrictions on what types of schemas can be
```
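For the dropdown example named in the second hunk (schema=Pydantic class, method="json_schema", include_raw=False, strict=True), a minimal sketch of that call pattern against `AzureChatOpenAI` is shown below. The endpoint, deployment, and API version values are placeholders, and the `AnswerWithJustification` schema is illustrative; `with_structured_output(..., method="json_schema", strict=True)` follows the documented signature, but this is a sketch rather than the docstring's exact example.

```python
# Sketch of the dropdown example's call pattern:
# schema=Pydantic class, method="json_schema", include_raw=False, strict=True.
# The Azure endpoint, deployment, and api_version values are placeholders.
from langchain_openai import AzureChatOpenAI
from pydantic import BaseModel


class AnswerWithJustification(BaseModel):
    """An answer to the user question along with justification for the answer."""

    answer: str
    justification: str


llm = AzureChatOpenAI(
    azure_endpoint="https://<your-resource>.openai.azure.com/",  # placeholder
    azure_deployment="<your-deployment>",                        # placeholder
    api_version="2024-08-01-preview",                            # placeholder
)

structured_llm = llm.with_structured_output(
    AnswerWithJustification,
    method="json_schema",
    strict=True,
)

# include_raw defaults to False, so the result is the parsed Pydantic object.
result = structured_llm.invoke(
    "What weighs more, a pound of bricks or a pound of feathers?"
)
print(result.answer, "-", result.justification)
```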