mirror of
https://github.com/hwchase17/langchain.git
synced 2025-08-31 10:23:18 +00:00
openai[patch]: update with_structured_outputs docstring (#31517)
Update docstrings
This commit is contained in:
@@ -809,6 +809,50 @@ class AzureChatOpenAI(BaseChatOpenAI):
            Note: ``strict`` can only be non-null if ``method`` is
            ``"json_schema"`` or ``"function_calling"``.
        tools:
            A list of tool-like objects to bind to the chat model. Requires that:

            - ``method`` is ``"json_schema"`` (default).
            - ``strict=True``
            - ``include_raw=True``

            If a model elects to call a
            tool, the resulting ``AIMessage`` in ``"raw"`` will include tool calls.

            .. dropdown:: Example

                .. code-block:: python

                    from langchain.chat_models import init_chat_model
                    from pydantic import BaseModel


                    class ResponseSchema(BaseModel):
                        response: str


                    def get_weather(location: str) -> str:
                        \"\"\"Get weather at a location.\"\"\"
                        pass

                    llm = init_chat_model("openai:gpt-4o-mini")

                    structured_llm = llm.with_structured_output(
                        ResponseSchema,
                        tools=[get_weather],
                        strict=True,
                        include_raw=True,
                    )

                    structured_llm.invoke("What's the weather in Boston?")

            .. code-block:: python

                {
                    "raw": AIMessage(content="", tool_calls=[...], ...),
                    "parsing_error": None,
                    "parsed": None,
                }

        kwargs: Additional keyword args aren't supported.
@@ -2552,6 +2552,50 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override]
            Note: ``strict`` can only be non-null if ``method`` is
            ``"json_schema"`` or ``"function_calling"``.
        tools:
            A list of tool-like objects to bind to the chat model. Requires that:

            - ``method`` is ``"json_schema"`` (default).
            - ``strict=True``
            - ``include_raw=True``

            If a model elects to call a
            tool, the resulting ``AIMessage`` in ``"raw"`` will include tool calls.

            .. dropdown:: Example

                .. code-block:: python

                    from langchain.chat_models import init_chat_model
                    from pydantic import BaseModel


                    class ResponseSchema(BaseModel):
                        response: str


                    def get_weather(location: str) -> str:
                        \"\"\"Get weather at a location.\"\"\"
                        pass

                    llm = init_chat_model("openai:gpt-4o-mini")

                    structured_llm = llm.with_structured_output(
                        ResponseSchema,
                        tools=[get_weather],
                        strict=True,
                        include_raw=True,
                    )

                    structured_llm.invoke("What's the weather in Boston?")

            .. code-block:: python

                {
                    "raw": AIMessage(content="", tool_calls=[...], ...),
                    "parsing_error": None,
                    "parsed": None,
                }

        kwargs: Additional keyword args aren't supported.
Reference in New Issue
Block a user