Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-10 15:06:18 +00:00)
ruff: add bugbear across packages (#31917)
WIP; the remaining packages will be covered in follow-up PRs.
This commit is contained in: parent 5b3e29f809, commit ae210c1590
@@ -134,19 +134,22 @@ def add(
        typer.Argument(help="The dependency to add"),
    ] = None,
    *,
-   api_path: Annotated[list[str], typer.Option(help="API paths to add")] = [],
+   api_path: Annotated[
+       Optional[list[str]],
+       typer.Option(help="API paths to add"),
+   ] = None,
    project_dir: Annotated[
        Optional[Path],
        typer.Option(help="The project directory"),
    ] = None,
    repo: Annotated[
-       list[str],
+       Optional[list[str]],
        typer.Option(help="Install templates from a specific github repo instead"),
-   ] = [],
+   ] = None,
    branch: Annotated[
-       list[str],
+       Optional[list[str]],
        typer.Option(help="Install templates from a specific branch"),
-   ] = [],
+   ] = None,
    pip: Annotated[
        bool,
        typer.Option(
@@ -163,6 +166,12 @@ def add(
    langchain app add extraction-openai-functions
    langchain app add git+ssh://git@github.com/efriis/simple-pirate.git
    """
+   if branch is None:
+       branch = []
+   if repo is None:
+       repo = []
+   if api_path is None:
+       api_path = []
    if not branch and not repo:
        warnings.warn(
            "Adding templates from the default branch and repo is deprecated."
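The two hunks above show the standard fix for flake8-bugbear's mutable-default rules (B006/B008): a list default is built once at function-definition time and shared by every call, so the CLI options switch to `Optional[...] = None` and are normalized back to `[]` at the top of the body. A minimal sketch of the difference, using a hypothetical `collect` helper rather than the typer command:

from typing import Optional


def collect_shared(item: str, bucket: list[str] = []) -> list[str]:
    # B006: this single list is created once and reused by every call
    # that omits `bucket`.
    bucket.append(item)
    return bucket


def collect_fresh(item: str, bucket: Optional[list[str]] = None) -> list[str]:
    # The pattern the diff switches to: default to None, normalize inside.
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket


print(collect_shared("a"), collect_shared("b"))  # ['a', 'b'] ['a', 'b'] -- shared state
print(collect_fresh("a"), collect_fresh("b"))    # ['a'] ['b']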
@@ -90,7 +90,7 @@ def new(
        replacements = _process_name(name)
    except ValueError as e:
        typer.echo(e)
-       raise typer.Exit(code=1)
+       raise typer.Exit(code=1) from None

    if name_class:
        if not re.match(r"^[A-Z][a-zA-Z0-9]*$", name_class):
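`raise typer.Exit(code=1) from None` satisfies bugbear's B904 (raise inside an `except` block without `from`). The original `ValueError` has already been echoed to the user, so `from None` suppresses the implicit chaining; later hunks use `from err` instead to keep the cause attached. A small standalone sketch with plain exceptions (no typer involved):

def parse_port(value: str) -> int:
    try:
        return int(value)
    except ValueError:
        # A bare `raise RuntimeError(...)` here is what B904 flags.
        # `from None` hides the handled ValueError on purpose;
        # `from exc` would keep it as __cause__.
        raise RuntimeError(f"invalid port: {value!r}") from None


try:
    parse_port("eighty")
except RuntimeError as exc:
    print(exc)            # invalid port: 'eighty'
    print(exc.__cause__)  # None -- the ValueError is not chained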
@@ -65,7 +65,7 @@ def parse_dependency_string(
    else:
        _, post_slash = find_slash.split("/", 1)
        if "@" in post_slash or "#" in post_slash:
-           _, ref = re.split(r"[@#]", post_slash, 1)
+           _, ref = re.split(r"[@#]", post_slash, maxsplit=1)

    # gitstring is everything before that
    gitstring = gitstring[: -len(ref) - 1] if ref is not None else gitstring
@@ -159,7 +159,7 @@ def _get_repo_path(gitstring: str, ref: Optional[str], repo_dir: Path) -> Path:
    hashed = hashlib.sha256((f"{gitstring}:{ref_str}").encode()).hexdigest()[:8]

    removed_protocol = gitstring.split("://")[-1]
-   removed_basename = re.split(r"[/:]", removed_protocol, 1)[-1]
+   removed_basename = re.split(r"[/:]", removed_protocol, maxsplit=1)[-1]
    removed_extras = removed_basename.split("#")[0]
    foldername = re.sub(r"\W", "_", removed_extras)

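Both `re.split` calls above now pass `maxsplit` by keyword, which is what bugbear's B034 asks for: a bare positional `1` after the pattern is easy to misread as the `flags` argument. Behaviour is unchanged, as this sketch shows (the dependency string is made up):

import re

dep = "github.com/example/repo.git@main#subdirectory=pkg"

# Positional form still works, but B034 flags the ambiguous trailing 1.
head_positional = re.split(r"[/:]", dep, 1)

# Keyword form used in the diff.
head_keyword = re.split(r"[/:]", dep, maxsplit=1)

assert head_positional == head_keyword
print(head_keyword)  # ['github.com', 'example/repo.git@main#subdirectory=pkg']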
@@ -50,6 +50,7 @@ exclude = [
[tool.ruff.lint]
select = [
    "A", # flake8-builtins
+   "B", # flake8-bugbear
    "ARG", # flake8-unused-arguments
    "ASYNC", # flake8-async
    "C4", # flake8-comprehensions
@@ -96,6 +97,7 @@ ignore = [
    "D105", # pydocstyle: Missing docstring in magic method
    "D107", # pydocstyle: Missing docstring in __init__
    "D407", # pydocstyle: Missing-dashed-underline-after-section
    "COM812", # Messes with the formatter
]
pyupgrade.keep-runtime-typing = true

@@ -29,12 +29,12 @@ def _warn_on_import(name: str, replacement: Optional[str] = None) -> None:
        warnings.warn(
            f"Importing {name} from langchain root module is no longer supported. "
            f"Please use {replacement} instead.",
-           stacklevel=3,
+           stacklevel=2,
        )
    else:
        warnings.warn(
            f"Importing {name} from langchain root module is no longer supported.",
-           stacklevel=3,
+           stacklevel=2,
        )


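This hunk and most of the ones that follow adjust `warnings.warn` calls for bugbear's B028 (no explicit `stacklevel`): calls that had no `stacklevel` now pass one, and the earlier hand-tuned values of 3, 4 and 5 are normalized to 2, so the warning is attributed to the immediate caller of the deprecated API rather than to a deeper frame guess. A standalone illustration with hypothetical helpers:

import warnings


def old_helper_default() -> None:
    # stacklevel=1 (the default) reports the warning against this line,
    # which tells the user nothing about their own code.
    warnings.warn("old_helper_default is deprecated", DeprecationWarning, stacklevel=1)


def old_helper_fixed() -> None:
    # stacklevel=2 attributes the warning to whoever called old_helper_fixed().
    warnings.warn("old_helper_fixed is deprecated", DeprecationWarning, stacklevel=2)


warnings.simplefilter("always")
old_helper_default()  # reported filename/lineno point inside the helper
old_helper_fixed()    # reported filename/lineno point at this call site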
@@ -255,7 +255,7 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC):
            warnings.warn(
                "callback_manager is deprecated. Please use callbacks instead.",
                DeprecationWarning,
-               stacklevel=4,
+               stacklevel=2,
            )
            values["callbacks"] = values.pop("callback_manager", None)
        return values
@@ -504,7 +504,7 @@ class ChatVectorDBChain(BaseConversationalRetrievalChain):
        warnings.warn(
            "`ChatVectorDBChain` is deprecated - "
            "please use `from langchain.chains import ConversationalRetrievalChain`",
-           stacklevel=4,
+           stacklevel=2,
        )
        return values

@@ -112,7 +112,7 @@ class LLMCheckerChain(Chain):
            "Directly instantiating an LLMCheckerChain with an llm is deprecated. "
            "Please instantiate with question_to_checked_assertions_chain "
            "or using the from_llm class method.",
-           stacklevel=5,
+           stacklevel=2,
        )
        if (
            "question_to_checked_assertions_chain" not in values
@@ -177,7 +177,7 @@ class LLMMathChain(Chain):
            "Directly instantiating an LLMMathChain with an llm is deprecated. "
            "Please instantiate with llm_chain argument or using the from_llm "
            "class method.",
-           stacklevel=5,
+           stacklevel=2,
        )
        if "llm_chain" not in values and values["llm"] is not None:
            prompt = values.get("prompt", PROMPT)
@@ -118,7 +118,7 @@ class LLMSummarizationCheckerChain(Chain):
            "Directly instantiating an LLMSummarizationCheckerChain with an llm is "
            "deprecated. Please instantiate with"
            " sequential_chain argument or using the from_llm class method.",
-           stacklevel=5,
+           stacklevel=2,
        )
        if "sequential_chain" not in values and values["llm"] is not None:
            values["sequential_chain"] = _load_sequential_chain(
@@ -74,7 +74,7 @@ class NatBotChain(Chain):
            "Directly instantiating an NatBotChain with an llm is deprecated. "
            "Please instantiate with llm_chain argument or using the from_llm "
            "class method.",
-           stacklevel=5,
+           stacklevel=2,
        )
        if "llm_chain" not in values and values["llm"] is not None:
            values["llm_chain"] = PROMPT | values["llm"] | StrOutputParser()
@@ -77,7 +77,7 @@ class VectorDBQAWithSourcesChain(BaseQAWithSourcesChain):
        warnings.warn(
            "`VectorDBQAWithSourcesChain` is deprecated - "
            "please use `from langchain.chains import RetrievalQAWithSourcesChain`",
-           stacklevel=5,
+           stacklevel=2,
        )
        return values

@@ -162,7 +162,7 @@ class QueryTransformer(Transformer):
            warnings.warn(
                "Dates are expected to be provided in ISO 8601 date format "
                "(YYYY-MM-DD).",
-               stacklevel=3,
+               stacklevel=2,
            )
        return {"date": item, "type": "date"}

@@ -124,12 +124,12 @@ class _EvalArgsMixin:
            msg = f"{self.__class__.__name__} requires an input string."
            raise ValueError(msg)
        if input_ is not None and not self.requires_input:
-           warn(self._skip_input_warning, stacklevel=3)
+           warn(self._skip_input_warning, stacklevel=2)
        if self.requires_reference and reference is None:
            msg = f"{self.__class__.__name__} requires a reference string."
            raise ValueError(msg)
        if reference is not None and not self.requires_reference:
-           warn(self._skip_reference_warning, stacklevel=3)
+           warn(self._skip_reference_warning, stacklevel=2)


class StringEvaluator(_EvalArgsMixin, ABC):
@@ -183,7 +183,7 @@ def _get_in_memory_vectorstore() -> type[VectorStore]:
        "Using InMemoryVectorStore as the default vectorstore."
        "This memory store won't persist data. You should explicitly"
        "specify a vectorstore when using VectorstoreIndexCreator",
-       stacklevel=3,
+       stacklevel=2,
    )
    return InMemoryVectorStore

@@ -58,7 +58,7 @@ class BaseChatMemory(BaseMemory, ABC):
                f"'{self.__class__.__name__}' got multiple output keys:"
                f" {outputs.keys()}. The default 'output' key is being used."
                f" If this is not desired, please manually set 'output_key'.",
-               stacklevel=3,
+               stacklevel=2,
            )
        else:
            msg = (
@@ -42,7 +42,7 @@ class CombinedMemory(BaseMemory):
                    "When using CombinedMemory, "
                    "input keys should be so the input is known. "
                    f" Was not set on {val}",
-                   stacklevel=5,
+                   stacklevel=2,
                )
        return value

@@ -27,12 +27,12 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
    """Get the translator class corresponding to the vector store class."""
    try:
        import langchain_community  # noqa: F401
-   except ImportError as e:
+   except ImportError as err:
        msg = (
            "The langchain-community package must be installed to use this feature."
            " Please install it using `pip install langchain-community`."
        )
-       raise ImportError(msg) from e
+       raise ImportError(msg) from err

    from langchain_community.query_constructors.astradb import AstraDBTranslator
    from langchain_community.query_constructors.chroma import ChromaTranslator
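The translator hunk renames the caught exception while keeping the `raise ImportError(msg) from err` chaining; the Chroma and Hugging Face hunks further down add the same `from e` chaining where it was missing (B904). A generic sketch of the optional-dependency idiom, with a hypothetical `fancy_extra` package standing in for `langchain_community`:

def load_fancy_extra():
    try:
        import fancy_extra  # hypothetical optional dependency
    except ImportError as err:
        msg = (
            "The fancy-extra package must be installed to use this feature. "
            "Please install it with `pip install fancy-extra`."
        )
        # Chain with `from err` so the real import failure stays visible.
        raise ImportError(msg) from err
    return fancy_extra


try:
    load_fancy_extra()
except ImportError as exc:
    print(exc)                  # the friendly install hint
    print(repr(exc.__cause__))  # the original ModuleNotFoundError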
@@ -350,7 +350,7 @@ def _validate_example_inputs_for_language_model(
    except InputFormatError:
        try:
            _get_messages(first_example.inputs or {})
-       except InputFormatError as e:
+       except InputFormatError as err2:
            msg = (
                "Example inputs do not match language model input format. "
                "Expected a dictionary with messages or a single prompt."
@@ -359,7 +359,7 @@ def _validate_example_inputs_for_language_model(
                " to convert the example.inputs to a compatible format"
                " for the llm or chat model you wish to evaluate."
            )
-           raise InputFormatError(msg) from e
+           raise InputFormatError(msg) from err2


def _validate_example_inputs_for_chain(
@@ -78,8 +78,8 @@ def __getattr__(name: str) -> Any:
            f"`from langchain_community.tools import {name}`.\n\n"
            "To install langchain-community run "
            "`pip install -U langchain-community`.",
-           category=LangChainDeprecationWarning,
+           stacklevel=2,
+           category=LangChainDeprecationWarning,
        )

    return getattr(tools, name)
@@ -145,6 +145,7 @@ ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogy
[tool.ruff.lint]
select = [
    "A", # flake8-builtins
+   "B", # flake8-bugbear
    "ASYNC", # flake8-async
    "B", # flake8-bugbear
    "C4", # flake8-comprehensions
@@ -195,6 +196,7 @@ ignore = [
    "S112", # Rarely useful
    "RUF012", # Doesn't play well with Pydantic
    "SLF001", # Private member access
    "UP007", # pyupgrade: non-pep604-annotation-union
]

pydocstyle.convention = "google"
@@ -314,7 +314,7 @@ def _format_messages(
    system: Union[str, list[dict], None] = None
    formatted_messages: list[dict] = []
    merged_messages = _merge_messages(messages)
-   for i, message in enumerate(merged_messages):
+   for _i, message in enumerate(merged_messages):
        if message.type == "system":
            if system is not None:
                msg = "Received multiple non-consecutive system messages."
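Renaming the loop index to `_i` addresses bugbear's B007 (unused loop control variable): the loop body evidently never reads the index, and the leading underscore records that it is intentionally unused. A tiny illustration:

messages = ["system: hi", "user: hello", "assistant: hey"]

# B007 flags `i`: the loop body never uses it.
for i, message in enumerate(messages):
    print(message.split(":", maxsplit=1)[0])

# Underscore-prefixing the name (or dropping enumerate) silences the rule.
for _i, message in enumerate(messages):
    print(message.upper())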
@@ -496,7 +496,7 @@ def _handle_anthropic_bad_request(e: anthropic.BadRequestError) -> None:
    """Handle Anthropic BadRequestError."""
    if ("messages: at least one message is required") in e.message:
        message = "Received only system message(s). "
-       warnings.warn(message)
+       warnings.warn(message, stacklevel=2)
-   raise e
+   raise

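Two small changes here: the warning gains an explicit `stacklevel`, and `raise e` becomes a bare `raise`, which re-raises the exception currently being handled with its traceback and context intact. A compact sketch with a stand-in error type (not the real `anthropic.BadRequestError`):

import warnings


class BadRequestError(Exception):
    """Stand-in for an SDK error type."""


def handle_bad_request(e: BadRequestError) -> None:
    if "at least one message is required" in str(e):
        warnings.warn("Received only system message(s).", stacklevel=2)
    # Bare `raise` re-raises the exception currently being handled.
    raise


try:
    try:
        raise BadRequestError("messages: at least one message is required")
    except BadRequestError as e:
        handle_bad_request(e)
except BadRequestError as exc:
    print("propagated:", exc)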
@@ -1558,7 +1558,7 @@ class ChatAnthropic(BaseChatModel):
            "generated. Consider disabling `thinking` or adjust your prompt to ensure "
            "the tool is called."
        )
-       warnings.warn(thinking_admonition)
+       warnings.warn(thinking_admonition, stacklevel=2)
        llm = self.bind_tools(
            [schema],
            ls_structured_output_format={
@@ -2120,7 +2120,7 @@ def _make_message_chunk_from_anthropic_event(
        )
    ):
        if coerce_content_to_string:
-           warnings.warn("Received unexpected tool content block.")
+           warnings.warn("Received unexpected tool content block.", stacklevel=2)
        content_block = event.content_block.model_dump()
        content_block["index"] = event.index
        if event.content_block.type == "tool_use":
@@ -170,6 +170,7 @@ class AnthropicLLM(LLM, _AnthropicCommon):
            "This Anthropic LLM is deprecated. "
            "Please use `from langchain_anthropic import ChatAnthropic` "
            "instead",
+           stacklevel=2,
        )
        return values

@@ -62,6 +62,7 @@ target-version = "py39"
[tool.ruff.lint]
select = [
    "A", # flake8-builtins
+   "B", # flake8-bugbear
    "ASYNC", # flake8-async
    "C4", # flake8-comprehensions
    "COM", # flake8-commas
@@ -1210,6 +1210,5 @@ def test_search_result_top_level() -> None:

def test_async_shared_client() -> None:
    llm = ChatAnthropic(model="claude-3-5-haiku-latest")
-   llm._async_client  # Instantiates lazily
    _ = asyncio.run(llm.ainvoke("Hello"))
    _ = asyncio.run(llm.ainvoke("Hello"))
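The deleted `llm._async_client` line is the kind of statement bugbear's B018 targets: an expression whose result is discarded, so the test asserts nothing about it, while the two `ainvoke` calls that remain exercise the shared async client for real. A tiny sketch with a hypothetical `Client` class:

class Client:
    @property
    def connection(self) -> dict:
        # Imagine an expensive, lazily created resource.
        return {"connected": True}


client = Client()

client.connection  # flagged: the value is computed and thrown away

# Clearer alternatives: bind it or assert on it.
conn = client.connection
assert conn["connected"]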
@@ -479,7 +479,7 @@ class Chroma(VectorStore):
                    "Try filtering complex metadata using "
                    "langchain_community.vectorstores.utils.filter_complex_metadata."
                )
-               raise ValueError(e.args[0] + "\n\n" + msg)
+               raise ValueError(e.args[0] + "\n\n" + msg) from e
            raise e
        if empty_ids:
            images_without_metadatas = [b64_texts[j] for j in empty_ids]
@@ -566,7 +566,7 @@ class Chroma(VectorStore):
                    "Try filtering complex metadata from the document using "
                    "langchain_community.vectorstores.utils.filter_complex_metadata."
                )
-               raise ValueError(e.args[0] + "\n\n" + msg)
+               raise ValueError(e.args[0] + "\n\n" + msg) from e
            raise e
        if empty_ids:
            texts_without_metadatas = [texts[j] for j in empty_ids]
@@ -64,6 +64,7 @@ target-version = "py39"
[tool.ruff.lint]
select = [
    "A", # flake8-builtins
+   "B", # flake8-bugbear
    "ASYNC", # flake8-async
    "C4", # flake8-comprehensions
    "COM", # flake8-commas
@@ -787,7 +787,7 @@ def test_collection_none_after_delete(
    _ = vectorstore._collection
    with pytest.raises(Exception, match="does not exist"):
        vectorstore._client.get_collection("test_collection")
-   with pytest.raises(Exception):
+   with pytest.raises(Exception):  # noqa: B017
        vectorstore.similarity_search("foo")


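`pytest.raises(Exception)` without a `match` is what bugbear's B017 calls an overly broad assertion: almost any failure inside the block would satisfy it. The first context manager in this hunk already pins down the message (`match="does not exist"`); the second keeps the broad form deliberately and opts out with the `# noqa: B017` added in the diff. A minimal standalone test in the same spirit (assumes pytest is installed; the fake `get_collection` helper is not langchain code):

import pytest


def get_collection(name: str) -> None:
    raise KeyError(f"collection {name} does not exist")


def test_missing_collection() -> None:
    # Preferred: assert the exception type and message.
    with pytest.raises(KeyError, match="does not exist"):
        get_collection("test_collection")

    # Broad form that B017 flags; keep it only with an explicit opt-out.
    with pytest.raises(Exception):  # noqa: B017
        get_collection("test_collection")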
@@ -418,7 +418,8 @@ class ChatGroq(BaseChatModel):
                warnings.warn(
                    f"""WARNING! {field_name} is not default parameter.
                    {field_name} was transferred to model_kwargs.
-                   Please confirm that {field_name} is what you intended."""
+                   Please confirm that {field_name} is what you intended.""",
+                   stacklevel=2,
                )
                extra[field_name] = values.pop(field_name)

@@ -46,6 +46,7 @@ target-version = "py39"
[tool.ruff.lint]
select = [
    "A", # flake8-builtins
+   "B", # flake8-bugbear
    "ASYNC", # flake8-async
    "C4", # flake8-comprehensions
    "COM", # flake8-commas
@@ -98,12 +98,12 @@ class HuggingFaceEndpointEmbeddings(BaseModel, Embeddings):
            self.client = client
            self.async_client = async_client

-       except ImportError:
+       except ImportError as e:
            msg = (
                "Could not import huggingface_hub python package. "
                "Please install it with `pip install huggingface_hub`."
            )
-           raise ImportError(msg)
+           raise ImportError(msg) from e
        return self

    def embed_documents(self, texts: list[str]) -> list[list[float]]:
@@ -115,12 +115,12 @@ class HuggingFacePipeline(BaseLLM):
            )
            from transformers import pipeline as hf_pipeline  # type: ignore[import]

-       except ImportError:
+       except ImportError as e:
            msg = (
                "Could not import transformers python package. "
                "Please install it with `pip install transformers`."
            )
-           raise ValueError(msg)
+           raise ValueError(msg) from e

        _model_kwargs = model_kwargs.copy() if model_kwargs else {}
        if device_map is not None:
@@ -60,6 +60,7 @@ target-version = "py39"
[tool.ruff.lint]
select = [
    "A", # flake8-builtins
+   "B", # flake8-bugbear
    "ASYNC", # flake8-async
    "C4", # flake8-comprehensions
    "COM", # flake8-commas
@@ -110,7 +111,6 @@ ignore = [
    "RUF012", # Doesn't play well with Pydantic
    "SLF001", # Private member access
    "UP007", # pyupgrade: non-pep604-annotation-union
-
]

[tool.coverage.run]