core: docstrings prompts (#23890)

Added missed docstrings. Formatted docstrings to the consistent form.
This commit is contained in:
Leonid Ganeline 2024-07-05 09:17:52 -07:00 committed by GitHub
parent 289960bc60
commit 1eca98ec56
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 498 additions and 55 deletions

View File

@ -87,12 +87,14 @@ class BasePromptTemplate(
@classmethod
def get_lc_namespace(cls) -> List[str]:
"""Get the namespace of the langchain object."""
"""Get the namespace of the langchain object.
Returns ["langchain", "schema", "prompt_template"]."""
return ["langchain", "schema", "prompt_template"]
@classmethod
def is_lc_serializable(cls) -> bool:
"""Return whether this class is serializable."""
"""Return whether this class is serializable.
Returns True."""
return True
class Config:
@ -102,11 +104,20 @@ class BasePromptTemplate(
@property
def OutputType(self) -> Any:
"""Return the output type of the prompt."""
return Union[StringPromptValue, ChatPromptValueConcrete]
def get_input_schema(
self, config: Optional[RunnableConfig] = None
) -> Type[BaseModel]:
"""Get the input schema for the prompt.
Args:
config: RunnableConfig, configuration for the prompt.
Returns:
Type[BaseModel]: The input schema for the prompt.
"""
# This is correct, but pydantic typings/mypy don't think so.
required_input_variables = {
k: (self.input_types.get(k, str), ...) for k in self.input_variables
@ -151,6 +162,15 @@ class BasePromptTemplate(
def invoke(
self, input: Dict, config: Optional[RunnableConfig] = None
) -> PromptValue:
"""Invoke the prompt.
Args:
input: Dict, input to the prompt.
config: RunnableConfig, configuration for the prompt.
Returns:
PromptValue: The output of the prompt.
"""
config = ensure_config(config)
if self.metadata:
config["metadata"] = {**config["metadata"], **self.metadata}
@ -166,6 +186,15 @@ class BasePromptTemplate(
async def ainvoke(
self, input: Dict, config: Optional[RunnableConfig] = None, **kwargs: Any
) -> PromptValue:
"""Async invoke the prompt.
Args:
input: Dict, input to the prompt.
config: RunnableConfig, configuration for the prompt.
Returns:
PromptValue: The output of the prompt.
"""
config = ensure_config(config)
if self.metadata:
config["metadata"].update(self.metadata)
@ -180,14 +209,35 @@ class BasePromptTemplate(
@abstractmethod
def format_prompt(self, **kwargs: Any) -> PromptValue:
"""Create Prompt Value."""
"""Create Prompt Value.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
PromptValue: The output of the prompt.
"""
async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
"""Create Prompt Value."""
"""Async create Prompt Value.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
PromptValue: The output of the prompt.
"""
return self.format_prompt(**kwargs)
def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate:
"""Return a partial of the prompt template."""
"""Return a partial of the prompt template.
Args:
kwargs: Union[str, Callable[[], str]], partial variables to set.
Returns:
BasePromptTemplate: A partial of the prompt template.
"""
prompt_dict = self.__dict__.copy()
prompt_dict["input_variables"] = list(
set(self.input_variables).difference(kwargs)
@ -220,7 +270,7 @@ class BasePromptTemplate(
"""
async def aformat(self, **kwargs: Any) -> FormatOutputType:
"""Format the prompt with the inputs.
"""Async format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
@ -242,7 +292,17 @@ class BasePromptTemplate(
raise NotImplementedError
def dict(self, **kwargs: Any) -> Dict:
"""Return dictionary representation of prompt."""
"""Return dictionary representation of prompt.
Args:
kwargs: Any additional arguments to pass to the dictionary.
Returns:
Dict: Dictionary representation of the prompt.
Raises:
NotImplementedError: If the prompt type is not implemented.
"""
prompt_dict = super().dict(**kwargs)
try:
prompt_dict["_type"] = self._prompt_type
@ -256,6 +316,11 @@ class BasePromptTemplate(
Args:
file_path: Path to directory to save prompt to.
Raises:
ValueError: If the prompt has partial variables.
ValueError: If the file path is not json or yaml.
NotImplementedError: If the prompt type is not implemented.
Example:
.. code-block:: python
@ -308,7 +373,7 @@ def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
First, this pulls information from the document from two sources:
1. `page_content`:
1. page_content:
This takes the information from the `document.page_content`
and assigns it to a variable named `page_content`.
2. metadata:
@ -341,11 +406,11 @@ def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
async def aformat_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
"""Format a document into a string based on a prompt template.
"""Async format a document into a string based on a prompt template.
First, this pulls information from the document from two sources:
1. `page_content`:
1. page_content:
This takes the information from the `document.page_content`
and assigns it to a variable named `page_content`.
2. metadata:

View File

@ -48,7 +48,8 @@ class BaseMessagePromptTemplate(Serializable, ABC):
@classmethod
def is_lc_serializable(cls) -> bool:
"""Return whether or not the class is serializable."""
"""Return whether or not the class is serializable.
Returns: True"""
return True
@classmethod
@ -68,7 +69,8 @@ class BaseMessagePromptTemplate(Serializable, ABC):
"""
async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
"""Format messages from kwargs. Should return a list of BaseMessages.
"""Async format messages from kwargs.
Should return a list of BaseMessages.
Args:
**kwargs: Keyword arguments to use for formatting.
@ -88,10 +90,18 @@ class BaseMessagePromptTemplate(Serializable, ABC):
"""
def pretty_repr(self, html: bool = False) -> str:
"""Human-readable representation."""
"""Human-readable representation.
Args:
html: Whether to format as HTML. Defaults to False.
Returns:
Human-readable representation.
"""
raise NotImplementedError
def pretty_print(self) -> None:
"""Print a human-readable representation."""
print(self.pretty_repr(html=is_interactive_env())) # noqa: T201
def __add__(self, other: Any) -> ChatPromptTemplate:
@ -208,6 +218,9 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
Returns:
List of BaseMessage.
Raises:
ValueError: If variable is not a list of messages.
"""
value = (
kwargs.get(self.variable_name, [])
@ -234,6 +247,14 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
return [self.variable_name] if not self.optional else []
def pretty_repr(self, html: bool = False) -> str:
"""Human-readable representation.
Args:
html: Whether to format as HTML. Defaults to False.
Returns:
Human-readable representation.
"""
var = "{" + self.variable_name + "}"
if html:
title = get_msg_title_repr("Messages Placeholder", bold=True)
@ -274,12 +295,13 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
Args:
template: a template.
template_format: format of the template.
template_format: format of the template. Defaults to "f-string".
partial_variables: A dictionary of variables that can be used to partially
fill in the template. For example, if the template is
`"{variable1} {variable2}"`, and `partial_variables` is
`{"variable1": "foo"}`, then the final prompt will be
`"foo {variable2}"`.
Defaults to None.
**kwargs: keyword arguments to pass to the constructor.
Returns:
@ -324,7 +346,7 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
"""
async def aformat(self, **kwargs: Any) -> BaseMessage:
"""Format the prompt template.
"""Async format the prompt template.
Args:
**kwargs: Keyword arguments to use for formatting.
@ -346,6 +368,14 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
return [self.format(**kwargs)]
async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
"""Async format messages from kwargs.
Args:
**kwargs: Keyword arguments to use for formatting.
Returns:
List of BaseMessages.
"""
return [await self.aformat(**kwargs)]
@property
@ -359,6 +389,14 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
return self.prompt.input_variables
def pretty_repr(self, html: bool = False) -> str:
"""Human-readable representation.
Args:
html: Whether to format as HTML. Defaults to False.
Returns:
Human-readable representation.
"""
# TODO: Handle partials
title = self.__class__.__name__.replace("MessagePromptTemplate", " Message")
title = get_msg_title_repr(title, bold=html)
@ -391,6 +429,14 @@ class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate):
)
async def aformat(self, **kwargs: Any) -> BaseMessage:
"""Async format the prompt template.
Args:
**kwargs: Keyword arguments to use for formatting.
Returns:
Formatted message.
"""
text = await self.prompt.aformat(**kwargs)
return ChatMessage(
content=text, role=self.role, additional_kwargs=self.additional_kwargs
@ -440,12 +486,16 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
Args:
template: a template.
template_format: format of the template.
template_format: format of the template. Defaults to "f-string".
partial_variables: A dictionary of variables that can be used to partially
fill in the template.
Defaults to None.
**kwargs: keyword arguments to pass to the constructor.
Returns:
A new instance of this class.
Raises:
ValueError: If the template is not a string or list of strings.
"""
if isinstance(template, str):
prompt: Union[StringPromptTemplate, List] = PromptTemplate.from_template(
@ -542,6 +592,14 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
return [self.format(**kwargs)]
async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
"""Async format messages from kwargs.
Args:
**kwargs: Keyword arguments to use for formatting.
Returns:
List of BaseMessages.
"""
return [await self.aformat(**kwargs)]
@property
@ -585,7 +643,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
)
async def aformat(self, **kwargs: Any) -> BaseMessage:
"""Format the prompt template.
"""Async format the prompt template.
Args:
**kwargs: Keyword arguments to use for formatting.
@ -613,6 +671,14 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
)
def pretty_repr(self, html: bool = False) -> str:
"""Human-readable representation.
Args:
html: Whether to format as HTML. Defaults to False.
Returns:
Human-readable representation.
"""
# TODO: Handle partials
title = self.__class__.__name__.replace("MessagePromptTemplate", " Message")
title = get_msg_title_repr(title, bold=html)
@ -671,25 +737,25 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
in all the template messages in this chat template.
Returns:
formatted string
formatted string.
"""
return self.format_prompt(**kwargs).to_string()
async def aformat(self, **kwargs: Any) -> str:
"""Format the chat template into a string.
"""Async format the chat template into a string.
Args:
**kwargs: keyword arguments to use for filling in template variables
in all the template messages in this chat template.
Returns:
formatted string
formatted string.
"""
return (await self.aformat_prompt(**kwargs)).to_string()
def format_prompt(self, **kwargs: Any) -> PromptValue:
"""
Format prompt. Should return a PromptValue.
"""Format prompt. Should return a PromptValue.
Args:
**kwargs: Keyword arguments to use for formatting.
@ -700,6 +766,14 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
return ChatPromptValue(messages=messages)
async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
"""Async format prompt. Should return a PromptValue.
Args:
**kwargs: Keyword arguments to use for formatting.
Returns:
PromptValue.
"""
messages = await self.aformat_messages(**kwargs)
return ChatPromptValue(messages=messages)
@ -708,14 +782,22 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
"""Format kwargs into a list of messages."""
async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
"""Format kwargs into a list of messages."""
"""Async format kwargs into a list of messages."""
return self.format_messages(**kwargs)
def pretty_repr(self, html: bool = False) -> str:
"""Human-readable representation."""
"""Human-readable representation.
Args:
html: Whether to format as HTML. Defaults to False.
Returns:
Human-readable representation.
"""
raise NotImplementedError
def pretty_print(self) -> None:
"""Print a human-readable representation."""
print(self.pretty_repr(html=is_interactive_env())) # noqa: T201
@ -881,6 +963,9 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
Returns:
Validated values.
Raises:
ValueError: If input variables do not match.
"""
messages = values["messages"]
input_vars = set()
@ -947,7 +1032,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
string_messages: list of (role, template) tuples.
Returns:
a chat prompt template
a chat prompt template.
"""
return cls( # type: ignore[call-arg]
messages=[
@ -967,7 +1052,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
string_messages: list of (role class, template) tuples.
Returns:
a chat prompt template
a chat prompt template.
"""
return cls.from_messages(string_messages)
@ -1006,10 +1091,11 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
(1) BaseMessagePromptTemplate, (2) BaseMessage, (3) 2-tuple of
(message type, template); e.g., ("human", "{user_input}"),
(4) 2-tuple of (message class, template), (4) a string which is
shorthand for ("human", template); e.g., "{user_input}"
shorthand for ("human", template); e.g., "{user_input}".
template_format: format of the template. Defaults to "f-string".
Returns:
a chat prompt template
a chat prompt template.
"""
_messages = [
_convert_to_message(message, template_format) for message in messages
@ -1043,7 +1129,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
in all the template messages in this chat template.
Returns:
list of formatted messages
list of formatted messages.
"""
kwargs = self._merge_partial_and_user_variables(**kwargs)
result = []
@ -1060,14 +1146,17 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
return result
async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
"""Format the chat template into a list of finalized messages.
"""Async format the chat template into a list of finalized messages.
Args:
**kwargs: keyword arguments to use for filling in template variables
in all the template messages in this chat template.
Returns:
list of formatted messages
list of formatted messages.
Raises:
ValueError: If unexpected input.
"""
kwargs = self._merge_partial_and_user_variables(**kwargs)
result = []
@ -1120,7 +1209,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
return type(self)(**prompt_dict)
def append(self, message: MessageLikeRepresentation) -> None:
"""Append message to the end of the chat template.
"""Append a message to the end of the chat template.
Args:
message: representation of a message to append.
@ -1128,7 +1217,11 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
self.messages.append(_convert_to_message(message))
def extend(self, messages: Sequence[MessageLikeRepresentation]) -> None:
"""Extend the chat template with a sequence of messages."""
"""Extend the chat template with a sequence of messages.
Args:
messages: sequence of message representations to append.
"""
self.messages.extend([_convert_to_message(message) for message in messages])
@overload
@ -1154,7 +1247,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
@property
def _prompt_type(self) -> str:
"""Name of prompt type."""
"""Name of prompt type. Used for serialization."""
return "chat"
def save(self, file_path: Union[Path, str]) -> None:
@ -1166,6 +1259,14 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
raise NotImplementedError()
def pretty_repr(self, html: bool = False) -> str:
"""Human-readable representation.
Args:
html: Whether to format as HTML. Defaults to False.
Returns:
Human-readable representation.
"""
# TODO: handle partials
return "\n\n".join(msg.pretty_repr(html=html) for msg in self.messages)
@ -1180,9 +1281,13 @@ def _create_template_from_message_type(
Args:
message_type: str the type of the message template (e.g., "human", "ai", etc.)
template: str the template string.
template_format: format of the template. Defaults to "f-string".
Returns:
a message prompt template of the appropriate type.
Raises:
ValueError: If unexpected message type.
"""
if message_type in ("human", "user"):
message: BaseMessagePromptTemplate = HumanMessagePromptTemplate.from_template(
@ -1249,10 +1354,15 @@ def _convert_to_message(
- string: shorthand for ("human", template); e.g., "{user_input}"
Args:
message: a representation of a message in one of the supported formats
message: a representation of a message in one of the supported formats.
template_format: format of the template. Defaults to "f-string".
Returns:
an instance of a message or a message template
an instance of a message or a message template.
Raises:
ValueError: If unexpected message type.
ValueError: If 2-tuple does not have 2 elements.
"""
if isinstance(message, (BaseMessagePromptTemplate, BaseChatPromptTemplate)):
_message: Union[

View File

@ -40,7 +40,18 @@ class _FewShotPromptTemplateMixin(BaseModel):
@root_validator(pre=True)
def check_examples_and_selector(cls, values: Dict) -> Dict:
"""Check that one and only one of examples/example_selector are provided."""
"""Check that one and only one of examples/example_selector are provided.
Args:
values: The values to check.
Returns:
The values if they are valid.
Raises:
ValueError: If neither or both examples and example_selector are provided.
ValueError: If both examples and example_selector are provided.
"""
examples = values.get("examples", None)
example_selector = values.get("example_selector", None)
if examples and example_selector:
@ -63,6 +74,9 @@ class _FewShotPromptTemplateMixin(BaseModel):
Returns:
List of examples.
Raises:
ValueError: If neither examples nor example_selector are provided.
"""
if self.examples is not None:
return self.examples
@ -74,13 +88,16 @@ class _FewShotPromptTemplateMixin(BaseModel):
)
async def _aget_examples(self, **kwargs: Any) -> List[dict]:
"""Get the examples to use for formatting the prompt.
"""Async get the examples to use for formatting the prompt.
Args:
**kwargs: Keyword arguments to be passed to the example selector.
Returns:
List of examples.
Raises:
ValueError: If neither examples nor example_selector are provided.
"""
if self.examples is not None:
return self.examples
@ -144,6 +161,16 @@ class FewShotPromptTemplate(_FewShotPromptTemplateMixin, StringPromptTemplate):
arbitrary_types_allowed = True
def format(self, **kwargs: Any) -> str:
"""Format the prompt with inputs generating a string.
Use this method to generate a string representation of a prompt.
Args:
**kwargs: keyword arguments to use for formatting.
Returns:
A string representation of the prompt.
"""
kwargs = self._merge_partial_and_user_variables(**kwargs)
# Get the examples to use.
examples = self._get_examples(**kwargs)
@ -162,6 +189,16 @@ class FewShotPromptTemplate(_FewShotPromptTemplateMixin, StringPromptTemplate):
return DEFAULT_FORMATTER_MAPPING[self.template_format](template, **kwargs)
async def aformat(self, **kwargs: Any) -> str:
"""Async format the prompt with inputs generating a string.
Use this method to generate a string representation of a prompt.
Args:
**kwargs: keyword arguments to use for formatting.
Returns:
A string representation of the prompt.
"""
kwargs = self._merge_partial_and_user_variables(**kwargs)
# Get the examples to use.
examples = await self._aget_examples(**kwargs)
@ -185,6 +222,14 @@ class FewShotPromptTemplate(_FewShotPromptTemplateMixin, StringPromptTemplate):
return "few_shot"
def save(self, file_path: Union[Path, str]) -> None:
"""Save the prompt template to a file.
Args:
file_path: The path to save the prompt template to.
Raises:
ValueError: If example_selector is provided.
"""
if self.example_selector:
raise ValueError("Saving an example selector is not currently supported")
return super().save(file_path)
@ -343,7 +388,7 @@ class FewShotChatMessagePromptTemplate(
return messages
async def aformat_messages(self, **kwargs: Any) -> List[BaseMessage]:
"""Format kwargs into a list of messages.
"""Async format kwargs into a list of messages.
Args:
**kwargs: keyword arguments to use for filling in templates in messages.
@ -370,7 +415,7 @@ class FewShotChatMessagePromptTemplate(
Use this method to generate a string representation of a prompt consisting
of chat messages.
Useful for feeding into a string based completion language model or debugging.
Useful for feeding into a string-based completion language model or debugging.
Args:
**kwargs: keyword arguments to use for formatting.
@ -382,8 +427,29 @@ class FewShotChatMessagePromptTemplate(
return get_buffer_string(messages)
async def aformat(self, **kwargs: Any) -> str:
"""Async format the prompt with inputs generating a string.
Use this method to generate a string representation of a prompt consisting
of chat messages.
Useful for feeding into a string-based completion language model or debugging.
Args:
**kwargs: keyword arguments to use for formatting.
Returns:
A string representation of the prompt.
"""
messages = await self.aformat_messages(**kwargs)
return get_buffer_string(messages)
def pretty_repr(self, html: bool = False) -> str:
"""Return a pretty representation of the prompt template.
Args:
html: Whether or not to return an HTML formatted string.
Returns:
A pretty representation of the prompt template.
"""
raise NotImplementedError()

View File

@ -156,6 +156,14 @@ class FewShotPromptWithTemplates(StringPromptTemplate):
return DEFAULT_FORMATTER_MAPPING[self.template_format](template, **kwargs)
async def aformat(self, **kwargs: Any) -> str:
"""Async format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
kwargs = self._merge_partial_and_user_variables(**kwargs)
# Get the examples to use.
examples = await self._aget_examples(**kwargs)
@ -197,6 +205,14 @@ class FewShotPromptWithTemplates(StringPromptTemplate):
return "few_shot_with_templates"
def save(self, file_path: Union[Path, str]) -> None:
"""Save the prompt to a file.
Args:
file_path: The path to save the prompt to.
Raises:
ValueError: If example_selector is provided.
"""
if self.example_selector:
raise ValueError("Saving an example selector is not currently supported")
return super().save(file_path)

View File

@ -37,9 +37,25 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
return ["langchain", "prompts", "image"]
def format_prompt(self, **kwargs: Any) -> PromptValue:
"""Format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
return ImagePromptValue(image_url=self.format(**kwargs))
async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
"""Async format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
return ImagePromptValue(image_url=await self.aformat(**kwargs))
def format(
@ -54,6 +70,10 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
Returns:
A formatted string.
Raises:
ValueError: If the url or path is not provided.
ValueError: If the path or url is not a string.
Example:
.. code-block:: python
@ -84,7 +104,27 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
return output
async def aformat(self, **kwargs: Any) -> ImageURL:
"""Async format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
Raises:
ValueError: If the url or path is not provided.
ValueError: If the path or url is not a string.
"""
return await run_in_executor(None, self.format, **kwargs)
def pretty_repr(self, html: bool = False) -> str:
"""Return a pretty representation of the prompt.
Args:
html: Whether to return an html formatted string.
Returns:
A pretty representation of the prompt.
"""
raise NotImplementedError()

View File

@ -18,7 +18,17 @@ logger = logging.getLogger(__name__)
def load_prompt_from_config(config: dict) -> BasePromptTemplate:
"""Load prompt from Config Dict."""
"""Load prompt from Config Dict.
Args:
config: Dict containing the prompt configuration.
Returns:
A PromptTemplate object.
Raises:
ValueError: If the prompt type is not supported.
"""
if "_type" not in config:
logger.warning("No `_type` key found, defaulting to `prompt`.")
config_type = config.pop("_type", "prompt")
@ -128,7 +138,18 @@ def _load_prompt(config: dict) -> PromptTemplate:
def load_prompt(
path: Union[str, Path], encoding: Optional[str] = None
) -> BasePromptTemplate:
"""Unified method for loading a prompt from LangChainHub or local fs."""
"""Unified method for loading a prompt from LangChainHub or local fs.
Args:
path: Path to the prompt file.
encoding: Encoding of the file. Defaults to None.
Returns:
A PromptTemplate object.
Raises:
RuntimeError: If the path is a LangChainHub path.
"""
if isinstance(path, str) and path.startswith("lc://"):
raise RuntimeError(
"Loading from the deprecated github-based Hub is no longer supported. "

View File

@ -14,6 +14,7 @@ class PipelinePromptTemplate(BasePromptTemplate):
"""Prompt template for composing multiple prompt templates together.
This can be useful when you want to reuse parts of prompts.
A PipelinePrompt consists of two main parts:
- final_prompt: This is the final prompt that is returned
- pipeline_prompts: This is a list of tuples, consisting
@ -45,6 +46,14 @@ class PipelinePromptTemplate(BasePromptTemplate):
return values
def format_prompt(self, **kwargs: Any) -> PromptValue:
"""Format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
for k, prompt in self.pipeline_prompts:
_inputs = _get_inputs(kwargs, prompt.input_variables)
if isinstance(prompt, BaseChatPromptTemplate):
@ -55,6 +64,14 @@ class PipelinePromptTemplate(BasePromptTemplate):
return self.final_prompt.format_prompt(**_inputs)
async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
"""Async format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
for k, prompt in self.pipeline_prompts:
_inputs = _get_inputs(kwargs, prompt.input_variables)
if isinstance(prompt, BaseChatPromptTemplate):
@ -65,9 +82,25 @@ class PipelinePromptTemplate(BasePromptTemplate):
return await self.final_prompt.aformat_prompt(**_inputs)
def format(self, **kwargs: Any) -> str:
"""Format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
return self.format_prompt(**kwargs).to_string()
async def aformat(self, **kwargs: Any) -> str:
"""Async format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
return (await self.aformat_prompt(**kwargs)).to_string()
@property

View File

@ -25,7 +25,8 @@ class PromptTemplate(StringPromptTemplate):
The template can be formatted using either f-strings (default) or jinja2 syntax.
*Security warning*: Prefer using `template_format="f-string"` instead of
*Security warning*:
Prefer using `template_format="f-string"` instead of
`template_format="jinja2"`, or make sure to NEVER accept jinja2 templates
from untrusted sources as they may lead to arbitrary Python code execution.
@ -110,6 +111,14 @@ class PromptTemplate(StringPromptTemplate):
return values
def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
"""Get the input schema for the prompt.
Args:
config: The runnable configuration.
Returns:
The input schema for the prompt.
"""
if self.template_format != "mustache":
return super().get_input_schema(config)
@ -158,6 +167,14 @@ class PromptTemplate(StringPromptTemplate):
return "prompt"
def format(self, **kwargs: Any) -> str:
"""Format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
kwargs = self._merge_partial_and_user_variables(**kwargs)
return DEFAULT_FORMATTER_MAPPING[self.template_format](self.template, **kwargs)
@ -204,7 +221,7 @@ class PromptTemplate(StringPromptTemplate):
Args:
template_file: The path to the file containing the prompt template.
input_variables: [DEPRECATED] A list of variable names the final prompt
template will expect.
template will expect. Defaults to None.
input_variables is ignored as from_file now delegates to from_template().
@ -230,7 +247,8 @@ class PromptTemplate(StringPromptTemplate):
) -> PromptTemplate:
"""Load a prompt template from a template.
*Security warning*: Prefer using `template_format="f-string"` instead of
*Security warning*:
Prefer using `template_format="f-string"` instead of
`template_format="jinja2"`, or make sure to NEVER accept jinja2 templates
from untrusted sources as they may lead to arbitrary Python code execution.
@ -239,18 +257,20 @@ class PromptTemplate(StringPromptTemplate):
be treated as a best-effort approach rather than a guarantee of security,
as it is an opt-out rather than opt-in approach.
Despite the sand-boxing, we recommend to never use jinja2 templates
Despite the sand-boxing, we recommend never using jinja2 templates
from untrusted sources.
Args:
template: The template to load.
template_format: The format of the template. Use `jinja2` for jinja2,
and `f-string` or None for f-strings.
Defaults to `f-string`.
partial_variables: A dictionary of variables that can be used to partially
fill in the template. For example, if the template is
`"{variable1} {variable2}"`, and `partial_variables` is
`{"variable1": "foo"}`, then the final prompt will be
`"foo {variable2}"`.
`"foo {variable2}"`. Defaults to None.
kwargs: Any other arguments to pass to the prompt template.
Returns:
The prompt template loaded from the template.

View File

@ -19,13 +19,24 @@ from langchain_core.utils.interactive_env import is_interactive_env
def jinja2_formatter(template: str, **kwargs: Any) -> str:
"""Format a template using jinja2.
*Security warning*: As of LangChain 0.0.329, this method uses Jinja2's
*Security warning*:
As of LangChain 0.0.329, this method uses Jinja2's
SandboxedEnvironment by default. However, this sand-boxing should
be treated as a best-effort approach rather than a guarantee of security.
Do not accept jinja2 templates from untrusted sources as they may lead
to arbitrary Python code execution.
https://jinja.palletsprojects.com/en/3.1.x/sandbox/
Args:
template: The template string.
**kwargs: The variables to format the template with.
Returns:
The formatted string.
Raises:
ImportError: If jinja2 is not installed.
"""
try:
from jinja2.sandbox import SandboxedEnvironment
@ -88,14 +99,29 @@ def _get_jinja2_variables_from_template(template: str) -> Set[str]:
def mustache_formatter(template: str, **kwargs: Any) -> str:
"""Format a template using mustache."""
"""Format a template using mustache.
Args:
template: The template string.
**kwargs: The variables to format the template with.
Returns:
The formatted string.
"""
return mustache.render(template, kwargs)
def mustache_template_vars(
template: str,
) -> Set[str]:
"""Get the variables from a mustache template."""
"""Get the variables from a mustache template.
Args:
template: The template string.
Returns:
The variables from the template.
"""
vars: Set[str] = set()
section_depth = 0
for type, key in mustache.tokenize(template):
@ -118,7 +144,14 @@ Defs = Dict[str, "Defs"]
def mustache_schema(
template: str,
) -> Type[BaseModel]:
"""Get the variables from a mustache template."""
"""Get the variables from a mustache template.
Args:
template: The template string.
Returns:
The variables from the template as a Pydantic model.
"""
fields = {}
prefix: Tuple[str, ...] = ()
section_stack: List[Tuple[str, ...]] = []
@ -178,6 +211,7 @@ def check_valid_template(
Raises:
ValueError: If the template format is not supported.
ValueError: If the prompt schema is invalid.
"""
try:
validator_func = DEFAULT_VALIDATOR_MAPPING[template_format]
@ -232,12 +266,36 @@ class StringPromptTemplate(BasePromptTemplate, ABC):
return ["langchain", "prompts", "base"]
def format_prompt(self, **kwargs: Any) -> PromptValue:
"""Format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
return StringPromptValue(text=self.format(**kwargs))
async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
"""Async format the prompt with the inputs.
Args:
kwargs: Any arguments to be passed to the prompt template.
Returns:
A formatted string.
"""
return StringPromptValue(text=await self.aformat(**kwargs))
def pretty_repr(self, html: bool = False) -> str:
"""Get a pretty representation of the prompt.
Args:
html: Whether to return an HTML-formatted string.
Returns:
A pretty representation of the prompt.
"""
# TODO: handle partials
dummy_vars = {
input_var: "{" + f"{input_var}" + "}" for input_var in self.input_variables
@ -249,4 +307,5 @@ class StringPromptTemplate(BasePromptTemplate, ABC):
return self.format(**dummy_vars)
def pretty_print(self) -> None:
"""Print a pretty representation of the prompt."""
print(self.pretty_repr(html=is_interactive_env())) # noqa: T201

View File

@ -138,6 +138,19 @@ class StructuredPrompt(ChatPromptTemplate):
*others: Union[Runnable[Any, Other], Callable[[Any], Other]],
name: Optional[str] = None,
) -> RunnableSerializable[Dict, Other]:
"""Pipe the structured prompt to a language model.
Args:
others: The language model to pipe the structured prompt to.
name: The name of the pipeline. Defaults to None.
Returns:
A RunnableSequence object.
Raises:
NotImplementedError: If the first element of `others`
is not a language model.
"""
if (
others
and isinstance(others[0], BaseLanguageModel)