core: Add ruff rules D (docstring) (#29406)

This ensures that the code is properly documented:
https://docs.astral.sh/ruff/rules/#pydocstyle-d

Related to #21983
Authored by Christophe Bornet on 2025-04-01 19:15:45 +02:00; committed by GitHub
parent 64df60e690
commit 88b4233fa1
120 changed files with 1152 additions and 444 deletions
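
For illustration only (not part of this commit), the sketch below shows the docstring shape the pydocstyle `D` rules enforce; in ruff these rules are typically enabled by adding `"D"` to `select` under `[tool.ruff.lint]` in `pyproject.toml`, optionally with a `convention` (e.g. `google`) under `[tool.ruff.lint.pydocstyle]`. The class and method names here are made up for the example.

.. code-block:: python

    """Module docstring: required by D100 for a public module."""


    class ExamplePromptTemplate:
        """Summary starts on the first line (D212) and ends with punctuation (D415).

        A blank line separates the summary from this description (D205).
        """

        def greet(self, name: str) -> str:
            """Return a greeting for ``name`` (D102 requires a method docstring)."""
            return f"Hello, {name}!"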

View File

@@ -1,3 +1,5 @@
"""Base class for prompt templates."""
from __future__ import annotations
import contextlib
@@ -98,6 +100,7 @@ class BasePromptTemplate(
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object.
Returns ["langchain", "schema", "prompt_template"].
"""
return ["langchain", "schema", "prompt_template"]
@@ -105,6 +108,7 @@ class BasePromptTemplate(
@classmethod
def is_lc_serializable(cls) -> bool:
"""Return whether this class is serializable.
Returns True.
"""
return True
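
For context, a minimal sketch (not part of the diff) showing how these classmethods can be queried; the values match the docstrings added above.

.. code-block:: python

    from langchain_core.prompts import BasePromptTemplate

    # Both hooks are classmethods, so no instance is needed.
    assert BasePromptTemplate.is_lc_serializable() is True
    assert BasePromptTemplate.get_lc_namespace() == [
        "langchain",
        "schema",
        "prompt_template",
    ]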
@@ -190,6 +194,7 @@ class BasePromptTemplate(
_inner_input = self._validate_input(inner_input)
return await self.aformat_prompt(**_inner_input)
@override
def invoke(
self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
) -> PromptValue:
@@ -215,6 +220,7 @@ class BasePromptTemplate(
serialized=self._serialized,
)
@override
async def ainvoke(
self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
) -> PromptValue:
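
A hedged usage sketch of the overridden `invoke`/`ainvoke` entry points (illustrative only; `PromptTemplate.from_template` is used as a concrete subclass):

.. code-block:: python

    import asyncio

    from langchain_core.prompts import PromptTemplate

    prompt = PromptTemplate.from_template("Tell me a {adjective} joke about {topic}.")

    # Synchronous path: a dict goes in, a PromptValue comes out.
    print(prompt.invoke({"adjective": "short", "topic": "compilers"}).to_string())


    async def main() -> None:
        # The async path mirrors the same signature.
        value = await prompt.ainvoke({"adjective": "short", "topic": "compilers"})
        print(value.to_string())


    asyncio.run(main())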

View File

@@ -22,6 +22,7 @@ from pydantic import (
SkipValidation,
model_validator,
)
from typing_extensions import override
from langchain_core._api import deprecated
from langchain_core.load import Serializable
@@ -57,13 +58,17 @@ class BaseMessagePromptTemplate(Serializable, ABC):
@classmethod
def is_lc_serializable(cls) -> bool:
"""Return whether or not the class is serializable.
Returns: True.
"""
return True
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
"""Get the namespace of the langchain object.
Default namespace is ["langchain", "prompts", "chat"].
"""
return ["langchain", "prompts", "chat"]
@abstractmethod
@@ -79,7 +84,6 @@ class BaseMessagePromptTemplate(Serializable, ABC):
async def aformat_messages(self, **kwargs: Any) -> list[BaseMessage]:
"""Async format messages from kwargs.
Should return a list of BaseMessages.
Args:
**kwargs: Keyword arguments to use for formatting.
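
For context, a hedged sketch of calling the async formatter on a concrete subclass (`HumanMessagePromptTemplate` lives in the same module but is not shown in this hunk):

.. code-block:: python

    import asyncio

    from langchain_core.prompts.chat import HumanMessagePromptTemplate

    template = HumanMessagePromptTemplate.from_template("Summarize {text} in one line.")


    async def main() -> None:
        # Returns a list of BaseMessage objects, as the docstring above says.
        messages = await template.aformat_messages(text="the release notes")
        print(messages)


    asyncio.run(main())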
@@ -211,14 +215,18 @@ class MessagesPlaceholder(BaseMessagePromptTemplate):
"""Maximum number of messages to include. If None, then will include all.
Defaults to None."""
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
return ["langchain", "prompts", "chat"]
def __init__(
self, variable_name: str, *, optional: bool = False, **kwargs: Any
) -> None:
"""Create a messages placeholder.
Args:
variable_name: Name of variable to use as messages.
optional: If True format_messages can be called with no arguments and will
return an empty list. If False then a named argument with name
`variable_name` must be passed in, even if the value is an empty list.
Defaults to False.
"""
# mypy can't detect the init which is defined in the parent class
# b/c these are BaseModel classes.
super().__init__( # type: ignore
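
A small illustrative sketch of the `optional` behaviour described in the docstring above (not part of the diff):

.. code-block:: python

    from langchain_core.messages import HumanMessage
    from langchain_core.prompts import MessagesPlaceholder

    optional = MessagesPlaceholder("history", optional=True)
    required = MessagesPlaceholder("history")

    # With optional=True, format_messages can be called with no arguments.
    print(optional.format_messages())  # []

    # Otherwise the named variable must be supplied, even if it is an empty list.
    print(required.format_messages(history=[HumanMessage(content="hi")]))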
@@ -294,11 +302,6 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
additional_kwargs: dict = Field(default_factory=dict)
"""Additional keyword arguments to pass to the prompt template."""
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
return ["langchain", "prompts", "chat"]
@classmethod
def from_template(
cls: type[MessagePromptTemplateT],
@@ -424,11 +427,6 @@ class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate):
role: str
"""Role of the message."""
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
return ["langchain", "prompts", "chat"]
def format(self, **kwargs: Any) -> BaseMessage:
"""Format the prompt template.
@@ -483,11 +481,6 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
_msg_class: type[BaseMessage]
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
return ["langchain", "prompts", "chat"]
@classmethod
def from_template(
cls: type[_StringImageMessagePromptTemplateT],
@@ -719,34 +712,22 @@ class AIMessagePromptTemplate(_StringImageMessagePromptTemplate):
_msg_class: type[BaseMessage] = AIMessage
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
return ["langchain", "prompts", "chat"]
class SystemMessagePromptTemplate(_StringImageMessagePromptTemplate):
"""System message prompt template.
This is a message that is not sent to the user.
"""
_msg_class: type[BaseMessage] = SystemMessage
@classmethod
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
return ["langchain", "prompts", "chat"]
class BaseChatPromptTemplate(BasePromptTemplate, ABC):
"""Base class for chat prompt templates."""
@property
@override
def lc_attributes(self) -> dict:
"""Return a list of attribute names that should be included in the
serialized kwargs. These attributes must be accepted by the
constructor.
"""
return {"input_variables": self.input_variables}
def format(self, **kwargs: Any) -> str:
@@ -968,8 +949,9 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
input_variables: A list of the names of the variables whose values are
required as inputs to the prompt.
optional_variables: A list of the names of the variables for placeholder
or MessagePlaceholder that are optional. These variables are auto inferred
from the prompt and user need not provide them.
or MessagePlaceholder that are optional.
These variables are auto inferred from the prompt and user need not
provide them.
partial_variables: A dictionary of the partial variables the prompt
template carries. Partial variables populate the template so that you
don't need to pass them in every time you call the prompt.
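
To make these variable categories concrete, a hedged sketch (the variable names are illustrative):

.. code-block:: python

    from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", "You answer in {language}."),
            MessagesPlaceholder("history", optional=True),  # optional, auto-inferred
            ("human", "{question}"),
        ]
    ).partial(language="English")  # a partial variable, filled in ahead of time

    # Only the remaining required input variable must be provided at call time.
    print(prompt.invoke({"question": "What is a prompt template?"}))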

View File

@@ -1,3 +1,5 @@
"""Image prompt template for a multimodal model."""
from typing import Any
from pydantic import Field
@@ -21,6 +23,7 @@ class ImagePromptTemplate(BasePromptTemplate[ImageURL]):
Options are: 'f-string', 'mustache', 'jinja2'."""
def __init__(self, **kwargs: Any) -> None:
"""Create an image prompt template."""
if "input_variables" not in kwargs:
kwargs["input_variables"] = []

View File

@@ -1,3 +1,5 @@
"""[DEPRECATED] Pipeline prompt template."""
from typing import Any
from typing import Optional as Optional
@@ -23,7 +25,9 @@ def _get_inputs(inputs: dict, input_variables: list[str]) -> dict:
),
)
class PipelinePromptTemplate(BasePromptTemplate):
"""This has been deprecated in favor of chaining individual prompts together in your
"""[DEPRECATED] Pipeline prompt template.
This has been deprecated in favor of chaining individual prompts together in your
code. E.g. using a for loop, you could do:
.. code-block:: python
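
The referenced code block is truncated in this hunk; as a hedged reconstruction of the chaining pattern the deprecation note describes (prompt names and templates are illustrative):

.. code-block:: python

    from langchain_core.prompts import PromptTemplate

    pipeline_prompts = [
        ("persona", PromptTemplate.from_template("You are {character}.")),
        ("greeting", PromptTemplate.from_template("{persona} Greet {user} warmly.")),
    ]
    final_prompt = PromptTemplate.from_template("{greeting}")

    my_input = {"character": "a pirate", "user": "Ada"}
    for name, prompt in pipeline_prompts:
        # Pass each prompt only the variables it declares, then feed its output forward.
        step_input = {k: my_input[k] for k in prompt.input_variables}
        my_input[name] = prompt.invoke(step_input).to_string()

    final_input = {k: my_input[k] for k in final_prompt.input_variables}
    print(final_prompt.invoke(final_input).to_string())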

View File

@@ -7,6 +7,7 @@ from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional, Union
from pydantic import BaseModel, model_validator
from typing_extensions import override
from langchain_core.prompts.string import (
DEFAULT_FORMATTER_MAPPING,
@@ -58,14 +59,15 @@ class PromptTemplate(StringPromptTemplate):
"""
@property
@override
def lc_attributes(self) -> dict[str, Any]:
return {
"template_format": self.template_format,
}
@classmethod
@override
def get_lc_namespace(cls) -> list[str]:
"""Get the namespace of the langchain object."""
return ["langchain", "prompts", "prompt"]
template: str
@@ -116,6 +118,7 @@ class PromptTemplate(StringPromptTemplate):
return values
@override
def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
"""Get the input schema for the prompt.

View File

@@ -65,6 +65,7 @@ def jinja2_formatter(template: str, /, **kwargs: Any) -> str:
def validate_jinja2(template: str, input_variables: list[str]) -> None:
"""Validate that the input variables are valid for the template.
Issues a warning if missing or extra variables are found.
Args:
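
A small sketch of calling the validator directly (requires the optional `jinja2` dependency; the template and variables are made up):

.. code-block:: python

    import warnings

    from langchain_core.prompts.string import validate_jinja2

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # "audience" is missing from input_variables and "tone" is unused.
        validate_jinja2("Write for {{ audience }}.", input_variables=["tone"])

    print([str(w.message) for w in caught])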

View File

@@ -1,3 +1,5 @@
"""Structured prompt template for a language model."""
from collections.abc import Iterator, Mapping, Sequence
from typing import (
Any,
@@ -7,6 +9,7 @@ from typing import (
)
from pydantic import BaseModel, Field
from typing_extensions import override
from langchain_core._api.beta_decorator import beta
from langchain_core.language_models.base import BaseLanguageModel
@@ -41,6 +44,14 @@ class StructuredPrompt(ChatPromptTemplate):
template_format: PromptTemplateFormat = "f-string",
**kwargs: Any,
) -> None:
"""Create a structured prompt template.
Args:
messages: sequence of messages.
schema_: schema for the structured prompt.
structured_output_kwargs: additional kwargs for structured output.
template_format: template format for the prompt.
"""
schema_ = schema_ or kwargs.pop("schema")
structured_output_kwargs = structured_output_kwargs or {}
for k in set(kwargs).difference(get_pydantic_field_names(self.__class__)):
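
A hedged usage sketch of the constructor documented above (the `Joke` schema is illustrative; piping into a chat model is omitted):

.. code-block:: python

    from pydantic import BaseModel

    from langchain_core.prompts.structured import StructuredPrompt


    class Joke(BaseModel):
        """Schema the model output should conform to."""

        setup: str
        punchline: str


    prompt = StructuredPrompt(
        [("human", "Tell me a joke about {topic}.")],
        schema_=Joke,
    )

    # On its own the prompt formats like a ChatPromptTemplate; piping it into a
    # chat model would bind the schema via with_structured_output.
    print(prompt.invoke({"topic": "prompts"}))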
@@ -107,6 +118,7 @@ class StructuredPrompt(ChatPromptTemplate):
"""
return cls(messages, schema, **kwargs)
@override
def __or__(
self,
other: Union[