langchain: deprecate create_structured_output_runnable (#20933)

ccurme 2024-04-26 14:00:40 -04:00 committed by GitHub
parent 38eccab3ae
commit bf16cefd18
2 changed files with 45 additions and 9 deletions


@@ -34,7 +34,7 @@ from langchain.chains.structured_output.base import (
 __all__ = [
     "get_openai_output_parser",
     "create_openai_fn_runnable",
-    "create_structured_output_runnable",
+    "create_structured_output_runnable",  # deprecated
     "create_openai_fn_chain",  # deprecated
     "create_structured_output_chain",  # deprecated
     "PYTHON_TO_JSON_TYPES",  # backwards compatibility
@@ -144,7 +144,7 @@ def create_openai_fn_chain(
 @deprecated(
-    since="0.1.1", removal="0.2.0", alternative="create_structured_output_runnable"
+    since="0.1.1", removal="0.2.0", alternative="ChatOpenAI.with_structured_output"
 )
 def create_structured_output_chain(
     output_schema: Union[Dict[str, Any], Type[BaseModel]],
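For readers migrating, here is a minimal sketch of the `ChatOpenAI.with_structured_output` alternative that the updated decorator points to. It assumes `langchain-openai` is installed and `OPENAI_API_KEY` is set; the `Person` schema and the model name are illustrative and not part of this commit.

from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_openai import ChatOpenAI


class Person(BaseModel):
    name: str = Field(description="The person's name")
    age: int = Field(description="The person's age")


# Any chat model that supports tool calling works; ChatOpenAI is one example.
llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
structured_llm = llm.with_structured_output(Person)
structured_llm.invoke("Anna is 29 years old.")  # -> Person(name='Anna', age=29)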


@@ -30,15 +30,15 @@ from langchain.output_parsers import (
 @deprecated(
     since="0.1.14",
     message=(
-        "LangChain has introduced a method called `with_structured_output` that"
-        "is available on ChatModels capable of tool calling."
+        "LangChain has introduced a method called `with_structured_output` that "
+        "is available on ChatModels capable of tool calling. "
         "You can read more about the method here: "
-        "https://python.langchain.com/docs/modules/model_io/chat/structured_output/"
-        "Please follow our extraction use case documentation for more guidelines"
-        "on how to do information extraction with LLMs."
-        "https://python.langchain.com/docs/use_cases/extraction/."
+        "https://python.langchain.com/docs/modules/model_io/chat/structured_output/ "
+        "Please follow our extraction use case documentation for more guidelines "
+        "on how to do information extraction with LLMs. "
+        "https://python.langchain.com/docs/use_cases/extraction/. "
         "If you notice other issues, please provide "
-        "feedback here:"
+        "feedback here: "
         "https://github.com/langchain-ai/langchain/discussions/18154"
     ),
     removal="0.3.0",
@@ -146,6 +146,42 @@ def create_openai_fn_runnable(
     return llm.bind(**llm_kwargs_) | output_parser
 
 
+@deprecated(
+    since="0.1.17",
+    message=(
+        "LangChain has introduced a method called `with_structured_output` that "
+        "is available on ChatModels capable of tool calling. "
+        "You can read more about the method here: "
+        "https://python.langchain.com/docs/modules/model_io/chat/structured_output/ "
+        "Please follow our extraction use case documentation for more guidelines "
+        "on how to do information extraction with LLMs. "
+        "https://python.langchain.com/docs/use_cases/extraction/. "
+        "If you notice other issues, please provide "
+        "feedback here: "
+        "https://github.com/langchain-ai/langchain/discussions/18154"
+    ),
+    removal="0.3.0",
+    pending=True,
+    alternative=(
+        """
+            from langchain_core.pydantic_v1 import BaseModel, Field
+            from langchain_anthropic import ChatAnthropic
+
+            class Joke(BaseModel):
+                setup: str = Field(description="The setup of the joke")
+                punchline: str = Field(description="The punchline to the joke")
+
+            # Or any other chat model that supports tools.
+            # Please refer to the documentation of structured_output
+            # to see an up-to-date list of which models support
+            # with_structured_output.
+            model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
+            structured_llm = model.with_structured_output(Joke)
+            structured_llm.invoke("Tell me a joke about cats. "
+                "Make sure to call the Joke function.")
+        """
+    ),
+)
 def create_structured_output_runnable(
     output_schema: Union[Dict[str, Any], Type[BaseModel]],
     llm: Runnable,
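A rough before/after of the migration this pending deprecation asks for, assuming a tool-calling chat model such as ChatAnthropic from langchain-anthropic; prompt handling and the helper's keyword arguments are elided, so treat the "before" line as schematic rather than the exact call signature.

from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_anthropic import ChatAnthropic
from langchain.chains import create_structured_output_runnable


class Joke(BaseModel):
    setup: str = Field(description="The setup of the joke")
    punchline: str = Field(description="The punchline to the joke")


llm = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)

# Before (now pending deprecation): build a structured-output runnable via the helper.
legacy = create_structured_output_runnable(Joke, llm)

# After: ask the chat model for structured output directly.
structured_llm = llm.with_structured_output(Joke)
structured_llm.invoke("Tell me a joke about cats. Make sure to call the Joke function.")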