Mirror of https://github.com/hwchase17/langchain.git, synced 2025-09-23 19:39:58 +00:00
chore(core): fix some docstrings (from DOC preview rule) (#32833)
* Add `Raises` sections
* Add `Returns` sections
* Add `Yields` sections

---------

Co-authored-by: Mason Daugherty <mason@langchain.dev>
Committed by GitHub
parent 4024d47412
commit f4e83e0ad8
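For context, the DOC preview (docstring) lint rules expect Google-style `Raises`, `Returns`, and `Yields` sections that match what a function actually does. A minimal sketch of that docstring shape, with illustrative function names that are not taken from this commit:

import json


def load_config(path: str) -> dict:
    """Load a JSON configuration file.

    Raises:
        FileNotFoundError: If ``path`` does not exist.

    Returns:
        The parsed configuration as a dict.
    """
    with open(path) as f:
        return json.load(f)


def count_up(n: int):
    """Count from 0 to ``n - 1``.

    Yields:
        The next integer in the range.
    """
    yield from range(n)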
@@ -101,16 +101,14 @@ class BasePromptTemplate(
     def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
 
-        Returns ["langchain", "schema", "prompt_template"].
+        Returns:
+            ``["langchain", "schema", "prompt_template"]``
         """
         return ["langchain", "schema", "prompt_template"]
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return whether this class is serializable.
-
-        Returns True.
-        """
+        """Return True as this class is serializable."""
         return True
 
     model_config = ConfigDict(
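`get_lc_namespace` and `is_lc_serializable` feed LangChain's serialization format. A quick sketch (not part of the diff) of where the namespace surfaces, assuming `dumpd` from `langchain_core.load`; the printed value reflects the namespace documented in this commit:

from langchain_core.load import dumpd
from langchain_core.prompts import PromptTemplate

prompt = PromptTemplate.from_template("Tell me a joke about {topic}")
serialized = dumpd(prompt)

# The "id" field is get_lc_namespace() plus the class name.
print(serialized["id"])  # ['langchain', 'prompts', 'prompt', 'PromptTemplate']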
@@ -341,9 +339,6 @@ class BasePromptTemplate(
 
         Returns:
             Dict: Dictionary representation of the prompt.
-
-        Raises:
-            NotImplementedError: If the prompt type is not implemented.
         """
         prompt_dict = super().model_dump(**kwargs)
         with contextlib.suppress(NotImplementedError):
@@ -740,10 +740,18 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
 
     @abstractmethod
     def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
-        """Format kwargs into a list of messages."""
+        """Format kwargs into a list of messages.
+
+        Returns:
+            List of messages.
+        """
 
     async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
-        """Async format kwargs into a list of messages."""
+        """Async format kwargs into a list of messages.
+
+        Returns:
+            List of messages.
+        """
         return self.format_messages(**kwargs)
 
     def pretty_repr(
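A short usage sketch (values are illustrative, not from the diff) of what `format_messages` returns on the most common subclass, `ChatPromptTemplate`:

from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages(
    [("system", "You are a {role}."), ("human", "{question}")]
)
messages = prompt.format_messages(role="pirate", question="Where is the treasure?")

# A list of BaseMessage objects: [SystemMessage(...), HumanMessage(...)]
print([type(m).__name__ for m in messages])  # ['SystemMessage', 'HumanMessage']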
@@ -925,9 +933,6 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
             input_types: A dictionary of the types of the variables the prompt template
                 expects. If not provided, all variables are assumed to be strings.
 
-        Returns:
-            A chat prompt template.
-
         Examples:
             Instantiation from a list of message templates:
 
@@ -981,7 +986,11 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
 
     @classmethod
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object."""
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "prompts", "chat"]``
+        """
         return ["langchain", "prompts", "chat"]
 
     def __add__(self, other: Any) -> ChatPromptTemplate:
@@ -1185,6 +1194,9 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
             **kwargs: keyword arguments to use for filling in template variables
                 in all the template messages in this chat template.
 
+        Raises:
+            ValueError: if messages are of unexpected types.
+
         Returns:
             list of formatted messages.
         """
@@ -1295,7 +1307,13 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
     def __getitem__(
         self, index: Union[int, slice]
     ) -> Union[MessageLike, ChatPromptTemplate]:
-        """Use to index into the chat template."""
+        """Use to index into the chat template.
+
+        Returns:
+            If index is an int, returns the message at that index.
+            If index is a slice, returns a new ``ChatPromptTemplate``
+                containing the messages in that slice.
+        """
         if isinstance(index, slice):
             start, stop, step = index.indices(len(self.messages))
             messages = self.messages[start:stop:step]
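The int-versus-slice behaviour documented above, as a small sketch (example messages are illustrative):

from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages(
    [("system", "You are a {role}."), ("human", "{question}"), ("ai", "{answer}")]
)

first = prompt[0]   # the message template at index 0
tail = prompt[1:]   # a new ChatPromptTemplate with the remaining two messages
print(len(tail))    # 2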
@@ -1303,7 +1321,7 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
         return self.messages[index]
 
     def __len__(self) -> int:
-        """Get the length of the chat template."""
+        """Return the length of the chat template."""
         return len(self.messages)
 
     @property
@@ -31,18 +31,25 @@ class DictPromptTemplate(RunnableSerializable[dict, dict]):
         return _get_input_variables(self.template, self.template_format)
 
     def format(self, **kwargs: Any) -> dict[str, Any]:
-        """Format the prompt with the inputs."""
+        """Format the prompt with the inputs.
+
+        Returns:
+            A formatted dict.
+        """
         return _insert_input_variables(self.template, kwargs, self.template_format)
 
     async def aformat(self, **kwargs: Any) -> dict[str, Any]:
-        """Format the prompt with the inputs."""
+        """Format the prompt with the inputs.
+
+        Returns:
+            A formatted dict.
+        """
         return self.format(**kwargs)
 
     @override
     def invoke(
         self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
     ) -> dict:
         """Invoke the prompt."""
         return self._call_with_config(
             lambda x: self.format(**x),
             input,
@@ -62,15 +69,16 @@ class DictPromptTemplate(RunnableSerializable[dict, dict]):
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return whether or not the class is serializable.
-
-        Returns: True.
-        """
+        """Return True as this class is serializable."""
         return True
 
     @classmethod
     def get_lc_namespace(cls) -> list[str]:
-        """Serialization namespace."""
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain_core", "prompts", "dict"]``
+        """
         return ["langchain_core", "prompts", "dict"]
 
     def pretty_repr(self, *, html: bool = False) -> str:
@@ -117,7 +117,7 @@ class FewShotPromptTemplate(_FewShotPromptTemplateMixin, StringPromptTemplate):
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return whether or not the class is serializable."""
+        """Return False as this class is not serializable."""
         return False
 
     validate_template: bool = False
@@ -367,7 +367,7 @@ class FewShotChatMessagePromptTemplate(
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return whether or not the class is serializable."""
+        """Return False as this class is not serializable."""
         return False
 
     model_config = ConfigDict(
@@ -46,7 +46,11 @@ class FewShotPromptWithTemplates(StringPromptTemplate):
 
     @classmethod
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object."""
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "prompts", "few_shot_with_templates"]``
+        """
         return ["langchain", "prompts", "few_shot_with_templates"]
 
     @model_validator(mode="before")
@@ -23,7 +23,12 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
     Options are: 'f-string', 'mustache', 'jinja2'."""
 
     def __init__(self, **kwargs: Any) -> None:
-        """Create an image prompt template."""
+        """Create an image prompt template.
+
+        Raises:
+            ValueError: If the input variables contain ``'url'``, ``'path'``, or
+                ``'detail'``.
+        """
         if "input_variables" not in kwargs:
             kwargs["input_variables"] = []
 
@@ -44,7 +49,11 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
 
     @classmethod
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object."""
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "prompts", "image"]``
+        """
         return ["langchain", "prompts", "image"]
 
     def format_prompt(self, **kwargs: Any) -> PromptValue:
@@ -84,6 +93,7 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
         Raises:
             ValueError: If the url is not provided.
             ValueError: If the url is not a string.
+            ValueError: If ``'path'`` is provided in the template or kwargs.
 
         Example:
 
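A usage sketch of `ImagePromptTemplate.format`, assuming a template dict keyed by ``url`` and that the class is importable from `langchain_core.prompts.image`; the URL and variable name are illustrative:

from langchain_core.prompts.image import ImagePromptTemplate

image_prompt = ImagePromptTemplate(
    input_variables=["image_url"],
    template={"url": "{image_url}"},
)
print(image_prompt.format(image_url="https://example.com/cat.png"))
# {'url': 'https://example.com/cat.png'}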
@@ -128,9 +138,6 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
 
         Returns:
             A formatted string.
-
-        Raises:
-            ValueError: If the path or url is not a string.
         """
         return await run_in_executor(None, self.format, **kwargs)
 
@@ -18,17 +18,15 @@ class BaseMessagePromptTemplate(Serializable, ABC):
 
     @classmethod
     def is_lc_serializable(cls) -> bool:
-        """Return whether or not the class is serializable.
-
-        Returns: True.
-        """
+        """Return True as this class is serializable."""
         return True
 
     @classmethod
     def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
 
-        Default namespace is ["langchain", "prompts", "chat"].
+        Returns:
+            ``["langchain", "prompts", "chat"]``
         """
         return ["langchain", "prompts", "chat"]
 
@@ -55,7 +55,11 @@ class PipelinePromptTemplate(BasePromptTemplate):
 
     @classmethod
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object."""
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "prompts", "pipeline"]``
+        """
         return ["langchain", "prompts", "pipeline"]
 
     @model_validator(mode="before")
@@ -69,6 +69,11 @@ class PromptTemplate(StringPromptTemplate):
     @classmethod
     @override
     def get_lc_namespace(cls) -> list[str]:
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "prompts", "prompt"]``
+        """
         return ["langchain", "prompts", "prompt"]
 
     template: str
@@ -135,7 +140,16 @@ class PromptTemplate(StringPromptTemplate):
         return mustache_schema(self.template)
 
     def __add__(self, other: Any) -> PromptTemplate:
-        """Override the + operator to allow for combining prompt templates."""
+        """Override the + operator to allow for combining prompt templates.
+
+        Raises:
+            ValueError: If the template formats are not f-string or if there are
+                conflicting partial variables.
+            NotImplementedError: If the other object is not a ``PromptTemplate`` or str.
+
+        Returns:
+            A new ``PromptTemplate`` that is the combination of the two.
+        """
         # Allow for easy combining
         if isinstance(other, PromptTemplate):
             if self.template_format != "f-string":
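The `+` operator documented above combines two f-string templates into one; a quick sketch with illustrative templates:

from langchain_core.prompts import PromptTemplate

intro = PromptTemplate.from_template("You are a {role}. ")
task = PromptTemplate.from_template("Answer this question: {question}")

combined = intro + task
print(combined.template)                 # "You are a {role}. Answer this question: {question}"
print(sorted(combined.input_variables))  # ['question', 'role']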
@@ -268,7 +268,11 @@ class StringPromptTemplate(BasePromptTemplate, ABC):
 
     @classmethod
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object."""
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "prompts", "base"]``
+        """
         return ["langchain", "prompts", "base"]
 
     def format_prompt(self, **kwargs: Any) -> PromptValue:
@@ -68,8 +68,11 @@ class StructuredPrompt(ChatPromptTemplate):
     def get_lc_namespace(cls) -> list[str]:
         """Get the namespace of the langchain object.
 
-        For example, if the class is `langchain.llms.openai.OpenAI`, then the
-        namespace is ["langchain", "llms", "openai"]
+        For example, if the class is ``langchain.llms.openai.OpenAI``, then the
+        namespace is ``["langchain", "llms", "openai"]``
+
+        Returns:
+            The namespace of the langchain object.
         """
         return cls.__module__.split(".")
 
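Unlike the hard-coded namespaces elsewhere in this diff, `StructuredPrompt` derives its namespace from the module path. A small check, assuming the class lives in `langchain_core.prompts.structured`:

from langchain_core.prompts.structured import StructuredPrompt

print(StructuredPrompt.get_lc_namespace())
# ['langchain_core', 'prompts', 'structured']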