infra: add print rule to ruff (#16221)

Added "# noqa: T201" comments for the existing prints; these suppressions can be
removed gradually, and the rule will prevent new prints from being introduced.
Author: Erick Friis
Committed: 2024-02-09 16:13:30 -08:00 (via GitHub)
Parent: c07c0da01a
Commit: 3a2eb6e12b
246 changed files with 563 additions and 470 deletions
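For background: T201 is ruff's implementation of the flake8-print "print found" check, enabled by adding the rule to the package's ruff lint selection (the configuration file itself is not shown in this diff excerpt). The snippet below is a minimal, hypothetical sketch of how the rule behaves and how the inline suppressions added throughout this commit work; the file and function names are illustrative only.

# sketch.py -- hypothetical example, not part of this commit.
# Assumes ruff runs with the flake8-print rules selected (T201, "print found").


def report_unsuppressed(value: object) -> None:
    # With T201 enabled, ruff flags this call.
    print(value)


def report_suppressed(value: object) -> None:
    # The inline comment silences T201 for this line only; this is the
    # pattern applied to every existing print call in the commit, so the
    # suppressions can be deleted one by one later.
    print(value)  # noqa: T201

Running ruff check over such a file would report T201 only at the unsuppressed call, assuming the rule is selected in the project's configuration.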


@@ -1,4 +1,5 @@
"""Chat prompt template."""
from __future__ import annotations
from abc import ABC, abstractmethod
@@ -79,7 +80,7 @@ class BaseMessagePromptTemplate(Serializable, ABC):
raise NotImplementedError
def pretty_print(self) -> None:
-print(self.pretty_repr(html=is_interactive_env()))
+print(self.pretty_repr(html=is_interactive_env())) # noqa: T201
def __add__(self, other: Any) -> ChatPromptTemplate:
"""Combine two prompt templates.
@@ -543,7 +544,7 @@ class BaseChatPromptTemplate(BasePromptTemplate, ABC):
raise NotImplementedError
def pretty_print(self) -> None:
-print(self.pretty_repr(html=is_interactive_env()))
+print(self.pretty_repr(html=is_interactive_env())) # noqa: T201
MessageLike = Union[BaseMessagePromptTemplate, BaseMessage, BaseChatPromptTemplate]


@@ -1,4 +1,5 @@
"""Prompt template that contains few shot examples."""
from __future__ import annotations
from pathlib import Path
@@ -277,7 +278,7 @@ class FewShotChatMessagePromptTemplate(
+ HumanMessagePromptTemplate.from_template("{input}")
)
# Show the prompt
-print(final_prompt.format_messages(input="What's 3+3?"))
+print(final_prompt.format_messages(input="What's 3+3?")) # noqa: T201
# Use within an LLM
from langchain_core.chat_models import ChatAnthropic


@@ -1,4 +1,5 @@
"""BasePrompt schema definition."""
from __future__ import annotations
import warnings
@@ -174,4 +175,4 @@ class StringPromptTemplate(BasePromptTemplate, ABC):
return self.format(**dummy_vars)
def pretty_print(self) -> None:
-print(self.pretty_repr(html=is_interactive_env()))
+print(self.pretty_repr(html=is_interactive_env())) # noqa: T201