docs, multiple: de-beta with_structured_output (#20850)

Author: Erick Friis
Date:   2024-04-24 12:34:57 -07:00
Committed by: GitHub
parent 477eb1745c
commit 8c95ac3145
7 changed files with 60 additions and 67 deletions

View File

@@ -19,7 +19,7 @@ from typing import (
 
 from typing_extensions import TypeAlias
 
-from langchain_core._api import beta, deprecated
+from langchain_core._api import deprecated
 from langchain_core.messages import (
     AnyMessage,
     BaseMessage,
@@ -201,7 +201,6 @@ class BaseLanguageModel(
                 prompt and additional model provider-specific output.
         """
 
-    @beta()
     def with_structured_output(
         self, schema: Union[Dict, Type[BaseModel]], **kwargs: Any
     ) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]:
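With the `@beta()` decorator removed, `with_structured_output` is part of the stable `BaseLanguageModel` surface and no longer emits a beta warning when called. A minimal usage sketch against the signature above, assuming `langchain-openai` is installed and `OPENAI_API_KEY` is set; the `Joke` schema is a hypothetical example, not part of this diff:

```python
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_openai import ChatOpenAI


class Joke(BaseModel):
    """A joke, used here as the structured-output schema."""

    setup: str = Field(description="The setup of the joke")
    punchline: str = Field(description="The punchline of the joke")


llm = ChatOpenAI(temperature=0)

# Returns a Runnable that invokes the model and parses its response into a
# Joke instance (passing a dict schema would yield a dict instead).
structured_llm = llm.with_structured_output(Joke)

joke = structured_llm.invoke("Tell me a joke about cats")
print(joke.setup)
print(joke.punchline)
```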

View File

@@ -580,7 +580,6 @@ class ChatAnthropic(BaseChatModel):
         formatted_tools = [convert_to_anthropic_tool(tool) for tool in tools]
         return self.bind(tools=formatted_tools, **kwargs)
 
-    @beta()
     def with_structured_output(
         self,
         schema: Union[Dict, Type[BaseModel]],

View File

@@ -24,7 +24,6 @@ from typing import (
 )
 
 from fireworks.client import AsyncFireworks, Fireworks  # type: ignore
-from langchain_core._api import beta
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
@@ -671,7 +670,6 @@ class ChatFireworks(BaseChatModel):
             kwargs["tool_choice"] = tool_choice
         return super().bind(tools=formatted_tools, **kwargs)
 
-    @beta()
     def with_structured_output(
         self,
         schema: Optional[Union[Dict, Type[BaseModel]]] = None,

View File

@@ -23,7 +23,6 @@ from typing import (
     cast,
 )
 
-from langchain_core._api import beta
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
@@ -595,7 +594,6 @@ class ChatGroq(BaseChatModel):
             kwargs["tool_choice"] = tool_choice
         return super().bind(tools=formatted_tools, **kwargs)
 
-    @beta()
     def with_structured_output(
         self,
         schema: Optional[Union[Dict, Type[BaseModel]]] = None,

View File

@@ -22,7 +22,6 @@ from typing import (
 
 import httpx
 from httpx_sse import EventSource, aconnect_sse, connect_sse
-from langchain_core._api import beta
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
@@ -588,7 +587,6 @@ class ChatMistralAI(BaseChatModel):
         formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
         return super().bind(tools=formatted_tools, **kwargs)
 
-    @beta()
     def with_structured_output(
         self,
         schema: Union[Dict, Type[BaseModel]],

View File

@@ -29,7 +29,6 @@ from typing import (
 
 import openai
 import tiktoken
-from langchain_core._api import beta
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
@@ -885,7 +884,6 @@ class ChatOpenAI(BaseChatModel):
     ) -> Runnable[LanguageModelInput, _DictOrPydantic]:
         ...
 
-    @beta()
     def with_structured_output(
         self,
         schema: Optional[_DictOrPydanticClass] = None,
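For `ChatOpenAI` the schema argument is optional and typed as `_DictOrPydanticClass`, so a plain dict schema is also accepted, in which case the parsed output comes back as a dict rather than a Pydantic object. A hedged sketch of that path; the function-style dict below is illustrative and not taken from this commit:

```python
from langchain_openai import ChatOpenAI

# An OpenAI function/tool style schema passed as a plain dict (illustrative).
joke_schema = {
    "name": "joke",
    "description": "A joke to tell the user.",
    "parameters": {
        "type": "object",
        "properties": {
            "setup": {"type": "string", "description": "The setup of the joke"},
            "punchline": {"type": "string", "description": "The punchline of the joke"},
        },
        "required": ["setup", "punchline"],
    },
}

llm = ChatOpenAI(temperature=0)
structured_llm = llm.with_structured_output(joke_schema)

# With a dict schema the result is a dict, e.g. {"setup": "...", "punchline": "..."}.
result = structured_llm.invoke("Tell me a joke about cats")
```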