From 248be0225907521962f8377e942f329c7f1c4108 Mon Sep 17 00:00:00 2001
From: Bagatur <22008038+baskaryan@users.noreply.github.com>
Date: Mon, 30 Sep 2024 11:47:46 -0700
Subject: [PATCH] core[patch]: fix structured prompt template format (#27003)

template_format is an init argument on ChatPromptTemplate but not an
attribute on the object, so it was getting shoved into
StructuredPrompt.structured_output_kwargs.
---
 libs/core/langchain_core/prompts/structured.py   |  3 +++
 .../tests/unit_tests/prompts/test_structured.py  | 13 ++++++++++++-
 .../tests/unit_tests/chat_models/test_base.py    |  2 --
 3 files changed, 15 insertions(+), 3 deletions(-)

diff --git a/libs/core/langchain_core/prompts/structured.py b/libs/core/langchain_core/prompts/structured.py
index 56450c626ba..03fefcee021 100644
--- a/libs/core/langchain_core/prompts/structured.py
+++ b/libs/core/langchain_core/prompts/structured.py
@@ -2,6 +2,7 @@ from collections.abc import Iterator, Mapping, Sequence
 from typing import (
     Any,
     Callable,
+    Literal,
     Optional,
     Union,
 )
@@ -37,6 +38,7 @@ class StructuredPrompt(ChatPromptTemplate):
         schema_: Optional[Union[dict, type[BaseModel]]] = None,
         *,
         structured_output_kwargs: Optional[dict[str, Any]] = None,
+        template_format: Literal["f-string", "mustache", "jinja2"] = "f-string",
         **kwargs: Any,
     ) -> None:
         schema_ = schema_ or kwargs.pop("schema")
@@ -47,6 +49,7 @@ class StructuredPrompt(ChatPromptTemplate):
             messages=messages,
             schema_=schema_,
             structured_output_kwargs=structured_output_kwargs,
+            template_format=template_format,
             **kwargs,
         )
 
diff --git a/libs/core/tests/unit_tests/prompts/test_structured.py b/libs/core/tests/unit_tests/prompts/test_structured.py
index 8b9d706be50..921ff68a0fe 100644
--- a/libs/core/tests/unit_tests/prompts/test_structured.py
+++ b/libs/core/tests/unit_tests/prompts/test_structured.py
@@ -1,13 +1,13 @@
 from functools import partial
 from inspect import isclass
 from typing import Any, Union, cast
-from typing import Optional as Optional
 
 from pydantic import BaseModel
 
 from langchain_core.language_models import FakeListChatModel
 from langchain_core.load.dump import dumps
 from langchain_core.load.load import loads
+from langchain_core.messages import HumanMessage
 from langchain_core.prompts.structured import StructuredPrompt
 from langchain_core.runnables.base import Runnable, RunnableLambda
 from langchain_core.utils.pydantic import is_basemodel_subclass
@@ -121,3 +121,14 @@ def test_structured_prompt_kwargs() -> None:
     chain = prompt | model
 
     assert chain.invoke({"hello": "there"}) == OutputSchema(name="yo", value=7)
+
+
+def test_structured_prompt_template_format() -> None:
+    prompt = StructuredPrompt(
+        [("human", "hi {{person.name}}")], schema={}, template_format="mustache"
+    )
+    assert prompt.messages[0].prompt.template_format == "mustache"  # type: ignore[union-attr, union-attr]
+    assert prompt.input_variables == ["person"]
+    assert prompt.invoke({"person": {"name": "foo"}}).to_messages() == [
+        HumanMessage("hi foo")
+    ]
diff --git a/libs/langchain/tests/unit_tests/chat_models/test_base.py b/libs/langchain/tests/unit_tests/chat_models/test_base.py
index 0b8bf012fd2..b82dead3099 100644
--- a/libs/langchain/tests/unit_tests/chat_models/test_base.py
+++ b/libs/langchain/tests/unit_tests/chat_models/test_base.py
@@ -126,8 +126,6 @@ def test_configurable() -> None:
         "tiktoken_model_name": None,
         "default_headers": None,
         "default_query": None,
-        "http_client": None,
-        "http_async_client": None,
         "stop": None,
         "extra_body": None,
         "include_response_headers": False,
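
Usage note (not part of the patch): the sketch below mirrors the new
test_structured_prompt_template_format test above and shows what the fix
enables at the call site. It assumes a langchain-core build that includes
this change; with template_format="mustache", the mustache variable is parsed
from the message template instead of the kwarg leaking into
structured_output_kwargs.

from langchain_core.messages import HumanMessage
from langchain_core.prompts.structured import StructuredPrompt

# Build a StructuredPrompt with a mustache-style template. Before this fix,
# template_format was swallowed into structured_output_kwargs and the
# template was still parsed as an f-string.
prompt = StructuredPrompt(
    [("human", "hi {{person.name}}")],
    schema={},  # a schema is required; an empty dict suffices for formatting
    template_format="mustache",
)

# The mustache variable "person" is recognized as an input variable...
assert prompt.input_variables == ["person"]

# ...and invoking the prompt resolves the nested mustache path.
assert prompt.invoke({"person": {"name": "foo"}}).to_messages() == [
    HumanMessage("hi foo")
]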