From ae210c1590cfc2deefb5a0984b0b57ab036213e3 Mon Sep 17 00:00:00 2001
From: Mason Daugherty
Date: Tue, 8 Jul 2025 12:22:55 -0400
Subject: [PATCH] ruff: add bugbear across packages (#31917)

WIP; the remaining packages will be covered in follow-up PRs.
---
 libs/cli/langchain_cli/namespaces/app.py       | 19 ++++++++++++++-----
 .../langchain_cli/namespaces/integration.py    |  2 +-
 libs/cli/langchain_cli/utils/git.py            |  4 ++--
 libs/cli/pyproject.toml                        |  2 ++
 libs/langchain/langchain/__init__.py           |  4 ++--
 libs/langchain/langchain/chains/base.py        |  2 +-
 .../chains/conversational_retrieval/base.py    |  2 +-
 .../langchain/chains/llm_checker/base.py       |  2 +-
 .../langchain/chains/llm_math/base.py          |  2 +-
 .../chains/llm_summarization_checker/base.py   |  2 +-
 .../langchain/langchain/chains/natbot/base.py  |  2 +-
 .../chains/qa_with_sources/vector_db.py        |  2 +-
 .../chains/query_constructor/parser.py         |  2 +-
 libs/langchain/langchain/evaluation/schema.py  |  4 ++--
 .../langchain/indexes/vectorstore.py           |  2 +-
 .../langchain/langchain/memory/chat_memory.py  |  2 +-
 libs/langchain/langchain/memory/combined.py    |  2 +-
 .../langchain/retrievers/self_query/base.py    |  4 ++--
 .../smith/evaluation/runner_utils.py           |  4 ++--
 libs/langchain/langchain/tools/__init__.py     |  2 +-
 libs/langchain/pyproject.toml                  |  2 ++
 .../langchain_anthropic/chat_models.py         |  8 ++++----
 .../anthropic/langchain_anthropic/llms.py      |  1 +
 libs/partners/anthropic/pyproject.toml         |  1 +
 .../integration_tests/test_chat_models.py      |  1 -
 .../chroma/langchain_chroma/vectorstores.py    |  4 ++--
 libs/partners/chroma/pyproject.toml            |  1 +
 .../integration_tests/test_vectorstores.py     |  2 +-
 .../groq/langchain_groq/chat_models.py         |  3 ++-
 libs/partners/groq/pyproject.toml              |  1 +
 .../embeddings/huggingface_endpoint.py         |  4 ++--
 .../llms/huggingface_pipeline.py               |  4 ++--
 libs/partners/huggingface/pyproject.toml       |  2 +-
 33 files changed, 59 insertions(+), 42 deletions(-)

diff --git a/libs/cli/langchain_cli/namespaces/app.py b/libs/cli/langchain_cli/namespaces/app.py
index a2eefe07ac5..01ec78885bf 100644
--- a/libs/cli/langchain_cli/namespaces/app.py
+++ b/libs/cli/langchain_cli/namespaces/app.py
@@ -134,19 +134,22 @@ def add(
         typer.Argument(help="The dependency to add"),
     ] = None,
     *,
-    api_path: Annotated[list[str], typer.Option(help="API paths to add")] = [],
+    api_path: Annotated[
+        Optional[list[str]],
+        typer.Option(help="API paths to add"),
+    ] = None,
     project_dir: Annotated[
         Optional[Path],
         typer.Option(help="The project directory"),
     ] = None,
     repo: Annotated[
-        list[str],
+        Optional[list[str]],
         typer.Option(help="Install templates from a specific github repo instead"),
-    ] = [],
+    ] = None,
     branch: Annotated[
-        list[str],
+        Optional[list[str]],
         typer.Option(help="Install templates from a specific branch"),
-    ] = [],
+    ] = None,
     pip: Annotated[
         bool,
         typer.Option(
@@ -163,6 +166,12 @@ def add(
     langchain app add extraction-openai-functions
     langchain app add git+ssh://git@github.com/efriis/simple-pirate.git
     """
+    if branch is None:
+        branch = []
+    if repo is None:
+        repo = []
+    if api_path is None:
+        api_path = []
     if not branch and not repo:
         warnings.warn(
             "Adding templates from the default branch and repo is deprecated."
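The app.py hunk above is the fix for flake8-bugbear's mutable-default rule (B006): a `list` default is created once, when the function is defined, and is then shared by every call, so the patch switches the Typer options to an `Optional[...] = None` sentinel and builds the empty list inside the function body. A minimal, self-contained sketch of the failure mode and of the same sentinel pattern; the plain functions below are illustrative stand-ins, not the actual `langchain app add` command:

```python
from typing import Optional


def add_template_buggy(name: str, repo: list[str] = []) -> list[str]:  # noqa: B006
    """Mutable default: every call appends to the same shared list object."""
    repo.append(name)
    return repo


def add_template_fixed(name: str, repo: Optional[list[str]] = None) -> list[str]:
    """None sentinel, as in the app.py hunk: each call gets a fresh list."""
    if repo is None:
        repo = []
    repo.append(name)
    return repo


if __name__ == "__main__":
    first = add_template_buggy("extraction")
    second = add_template_buggy("pirate")
    print(first is second, second)  # True ['extraction', 'pirate'] -- state leaked across calls
    print(add_template_fixed("extraction"), add_template_fixed("pirate"))  # ['extraction'] ['pirate']
```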
diff --git a/libs/cli/langchain_cli/namespaces/integration.py b/libs/cli/langchain_cli/namespaces/integration.py
index 9f4cfef574d..87ce9a11caa 100644
--- a/libs/cli/langchain_cli/namespaces/integration.py
+++ b/libs/cli/langchain_cli/namespaces/integration.py
@@ -90,7 +90,7 @@ def new(
         replacements = _process_name(name)
     except ValueError as e:
         typer.echo(e)
-        raise typer.Exit(code=1)
+        raise typer.Exit(code=1) from None
 
     if name_class:
         if not re.match(r"^[A-Z][a-zA-Z0-9]*$", name_class):
diff --git a/libs/cli/langchain_cli/utils/git.py b/libs/cli/langchain_cli/utils/git.py
index f45a2889270..68155d417c1 100644
--- a/libs/cli/langchain_cli/utils/git.py
+++ b/libs/cli/langchain_cli/utils/git.py
@@ -65,7 +65,7 @@ def parse_dependency_string(
         else:
             _, post_slash = find_slash.split("/", 1)
             if "@" in post_slash or "#" in post_slash:
-                _, ref = re.split(r"[@#]", post_slash, 1)
+                _, ref = re.split(r"[@#]", post_slash, maxsplit=1)
 
         # gitstring is everything before that
         gitstring = gitstring[: -len(ref) - 1] if ref is not None else gitstring
@@ -159,7 +159,7 @@ def _get_repo_path(gitstring: str, ref: Optional[str], repo_dir: Path) -> Path:
     hashed = hashlib.sha256((f"{gitstring}:{ref_str}").encode()).hexdigest()[:8]
 
     removed_protocol = gitstring.split("://")[-1]
-    removed_basename = re.split(r"[/:]", removed_protocol, 1)[-1]
+    removed_basename = re.split(r"[/:]", removed_protocol, maxsplit=1)[-1]
     removed_extras = removed_basename.split("#")[0]
     foldername = re.sub(r"\W", "_", removed_extras)
 
diff --git a/libs/cli/pyproject.toml b/libs/cli/pyproject.toml
index d421b6318f8..fdc3dba53fb 100644
--- a/libs/cli/pyproject.toml
+++ b/libs/cli/pyproject.toml
@@ -50,6 +50,7 @@ exclude = [
 [tool.ruff.lint]
 select = [
     "A",     # flake8-builtins
+    "B",     # flake8-bugbear
     "ARG",   # flake8-unused-arguments
     "ASYNC", # flake8-async
     "C4",    # flake8-comprehensions
@@ -96,6 +97,7 @@ ignore = [
     "D105", # pydocstyle: Missing docstring in magic method
     "D107", # pydocstyle: Missing docstring in __init__
     "D407", # pydocstyle: Missing-dashed-underline-after-section
+    "COM812", # Messes with the formatter
 ]
 
 pyupgrade.keep-runtime-typing = true
diff --git a/libs/langchain/langchain/__init__.py b/libs/langchain/langchain/__init__.py
index edc20ad2b3a..fd5f8851d4f 100644
--- a/libs/langchain/langchain/__init__.py
+++ b/libs/langchain/langchain/__init__.py
@@ -29,12 +29,12 @@ def _warn_on_import(name: str, replacement: Optional[str] = None) -> None:
         warnings.warn(
             f"Importing {name} from langchain root module is no longer supported. "
             f"Please use {replacement} instead.",
-            stacklevel=3,
+            stacklevel=2,
         )
     else:
         warnings.warn(
             f"Importing {name} from langchain root module is no longer supported.",
-            stacklevel=3,
+            stacklevel=2,
         )
 
 
diff --git a/libs/langchain/langchain/chains/base.py b/libs/langchain/langchain/chains/base.py
index 27aec413319..fd24eee8e7f 100644
--- a/libs/langchain/langchain/chains/base.py
+++ b/libs/langchain/langchain/chains/base.py
@@ -255,7 +255,7 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC):
             warnings.warn(
                 "callback_manager is deprecated. Please use callbacks instead.",
                 DeprecationWarning,
-                stacklevel=4,
+                stacklevel=2,
             )
             values["callbacks"] = values.pop("callback_manager", None)
         return values
diff --git a/libs/langchain/langchain/chains/conversational_retrieval/base.py b/libs/langchain/langchain/chains/conversational_retrieval/base.py
index 76c80072732..a4f17554dba 100644
--- a/libs/langchain/langchain/chains/conversational_retrieval/base.py
+++ b/libs/langchain/langchain/chains/conversational_retrieval/base.py
@@ -504,7 +504,7 @@ class ChatVectorDBChain(BaseConversationalRetrievalChain):
         warnings.warn(
             "`ChatVectorDBChain` is deprecated - "
             "please use `from langchain.chains import ConversationalRetrievalChain`",
-            stacklevel=4,
+            stacklevel=2,
         )
         return values
 
diff --git a/libs/langchain/langchain/chains/llm_checker/base.py b/libs/langchain/langchain/chains/llm_checker/base.py
index a8cbd11f298..9206b7d2536 100644
--- a/libs/langchain/langchain/chains/llm_checker/base.py
+++ b/libs/langchain/langchain/chains/llm_checker/base.py
@@ -112,7 +112,7 @@ class LLMCheckerChain(Chain):
             "Directly instantiating an LLMCheckerChain with an llm is deprecated. "
             "Please instantiate with question_to_checked_assertions_chain "
             "or using the from_llm class method.",
-            stacklevel=5,
+            stacklevel=2,
         )
         if (
             "question_to_checked_assertions_chain" not in values
diff --git a/libs/langchain/langchain/chains/llm_math/base.py b/libs/langchain/langchain/chains/llm_math/base.py
index 58ea43c5af5..9830734919d 100644
--- a/libs/langchain/langchain/chains/llm_math/base.py
+++ b/libs/langchain/langchain/chains/llm_math/base.py
@@ -177,7 +177,7 @@ class LLMMathChain(Chain):
             "Directly instantiating an LLMMathChain with an llm is deprecated. "
             "Please instantiate with llm_chain argument or using the from_llm "
             "class method.",
-            stacklevel=5,
+            stacklevel=2,
         )
         if "llm_chain" not in values and values["llm"] is not None:
             prompt = values.get("prompt", PROMPT)
diff --git a/libs/langchain/langchain/chains/llm_summarization_checker/base.py b/libs/langchain/langchain/chains/llm_summarization_checker/base.py
index 7b75b9917cd..fca7c2ae0f5 100644
--- a/libs/langchain/langchain/chains/llm_summarization_checker/base.py
+++ b/libs/langchain/langchain/chains/llm_summarization_checker/base.py
@@ -118,7 +118,7 @@ class LLMSummarizationCheckerChain(Chain):
             "Directly instantiating an LLMSummarizationCheckerChain with an llm is "
             "deprecated. Please instantiate with"
             " sequential_chain argument or using the from_llm class method.",
-            stacklevel=5,
+            stacklevel=2,
         )
         if "sequential_chain" not in values and values["llm"] is not None:
             values["sequential_chain"] = _load_sequential_chain(
diff --git a/libs/langchain/langchain/chains/natbot/base.py b/libs/langchain/langchain/chains/natbot/base.py
index a178c208953..c1108ce4734 100644
--- a/libs/langchain/langchain/chains/natbot/base.py
+++ b/libs/langchain/langchain/chains/natbot/base.py
@@ -74,7 +74,7 @@ class NatBotChain(Chain):
             "Directly instantiating an NatBotChain with an llm is deprecated. "
             "Please instantiate with llm_chain argument or using the from_llm "
             "class method.",
-            stacklevel=5,
+            stacklevel=2,
         )
         if "llm_chain" not in values and values["llm"] is not None:
             values["llm_chain"] = PROMPT | values["llm"] | StrOutputParser()
diff --git a/libs/langchain/langchain/chains/qa_with_sources/vector_db.py b/libs/langchain/langchain/chains/qa_with_sources/vector_db.py
index 566309bde41..bc5509fcdea 100644
--- a/libs/langchain/langchain/chains/qa_with_sources/vector_db.py
+++ b/libs/langchain/langchain/chains/qa_with_sources/vector_db.py
@@ -77,7 +77,7 @@ class VectorDBQAWithSourcesChain(BaseQAWithSourcesChain):
         warnings.warn(
             "`VectorDBQAWithSourcesChain` is deprecated - "
             "please use `from langchain.chains import RetrievalQAWithSourcesChain`",
-            stacklevel=5,
+            stacklevel=2,
         )
         return values
 
diff --git a/libs/langchain/langchain/chains/query_constructor/parser.py b/libs/langchain/langchain/chains/query_constructor/parser.py
index 1d0f00a448a..ab973d948e6 100644
--- a/libs/langchain/langchain/chains/query_constructor/parser.py
+++ b/libs/langchain/langchain/chains/query_constructor/parser.py
@@ -162,7 +162,7 @@ class QueryTransformer(Transformer):
             warnings.warn(
                 "Dates are expected to be provided in ISO 8601 date format "
                 "(YYYY-MM-DD).",
-                stacklevel=3,
+                stacklevel=2,
             )
         return {"date": item, "type": "date"}
 
diff --git a/libs/langchain/langchain/evaluation/schema.py b/libs/langchain/langchain/evaluation/schema.py
index 588d6c46346..402722bca60 100644
--- a/libs/langchain/langchain/evaluation/schema.py
+++ b/libs/langchain/langchain/evaluation/schema.py
@@ -124,12 +124,12 @@ class _EvalArgsMixin:
             msg = f"{self.__class__.__name__} requires an input string."
             raise ValueError(msg)
         if input_ is not None and not self.requires_input:
-            warn(self._skip_input_warning, stacklevel=3)
+            warn(self._skip_input_warning, stacklevel=2)
         if self.requires_reference and reference is None:
             msg = f"{self.__class__.__name__} requires a reference string."
             raise ValueError(msg)
         if reference is not None and not self.requires_reference:
-            warn(self._skip_reference_warning, stacklevel=3)
+            warn(self._skip_reference_warning, stacklevel=2)
 
 
 class StringEvaluator(_EvalArgsMixin, ABC):
diff --git a/libs/langchain/langchain/indexes/vectorstore.py b/libs/langchain/langchain/indexes/vectorstore.py
index c1e311feece..e6889160c5e 100644
--- a/libs/langchain/langchain/indexes/vectorstore.py
+++ b/libs/langchain/langchain/indexes/vectorstore.py
@@ -183,7 +183,7 @@ def _get_in_memory_vectorstore() -> type[VectorStore]:
         "Using InMemoryVectorStore as the default vectorstore."
         "This memory store won't persist data. You should explicitly"
         "specify a vectorstore when using VectorstoreIndexCreator",
-        stacklevel=3,
+        stacklevel=2,
     )
     return InMemoryVectorStore
 
diff --git a/libs/langchain/langchain/memory/chat_memory.py b/libs/langchain/langchain/memory/chat_memory.py
index 10a8cda61cc..e94ab712bcf 100644
--- a/libs/langchain/langchain/memory/chat_memory.py
+++ b/libs/langchain/langchain/memory/chat_memory.py
@@ -58,7 +58,7 @@ class BaseChatMemory(BaseMemory, ABC):
                     f"'{self.__class__.__name__}' got multiple output keys:"
                     f" {outputs.keys()}. The default 'output' key is being used."
                     f" If this is not desired, please manually set 'output_key'.",
-                    stacklevel=3,
+                    stacklevel=2,
                 )
             else:
                 msg = (
diff --git a/libs/langchain/langchain/memory/combined.py b/libs/langchain/langchain/memory/combined.py
index eb25197ec84..83978bd2629 100644
--- a/libs/langchain/langchain/memory/combined.py
+++ b/libs/langchain/langchain/memory/combined.py
@@ -42,7 +42,7 @@ class CombinedMemory(BaseMemory):
                     "When using CombinedMemory, "
                     "input keys should be so the input is known. "
                     f" Was not set on {val}",
-                    stacklevel=5,
+                    stacklevel=2,
                 )
         return value
 
diff --git a/libs/langchain/langchain/retrievers/self_query/base.py b/libs/langchain/langchain/retrievers/self_query/base.py
index f73d84a6d6c..996b9fd876b 100644
--- a/libs/langchain/langchain/retrievers/self_query/base.py
+++ b/libs/langchain/langchain/retrievers/self_query/base.py
@@ -27,12 +27,12 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
     """Get the translator class corresponding to the vector store class."""
     try:
         import langchain_community  # noqa: F401
-    except ImportError as e:
+    except ImportError as err:
         msg = (
             "The langchain-community package must be installed to use this feature."
             " Please install it using `pip install langchain-community`."
         )
-        raise ImportError(msg) from e
+        raise ImportError(msg) from err
 
     from langchain_community.query_constructors.astradb import AstraDBTranslator
     from langchain_community.query_constructors.chroma import ChromaTranslator
diff --git a/libs/langchain/langchain/smith/evaluation/runner_utils.py b/libs/langchain/langchain/smith/evaluation/runner_utils.py
index 193e05556a3..99a7f2487e1 100644
--- a/libs/langchain/langchain/smith/evaluation/runner_utils.py
+++ b/libs/langchain/langchain/smith/evaluation/runner_utils.py
@@ -350,7 +350,7 @@ def _validate_example_inputs_for_language_model(
         except InputFormatError:
             try:
                 _get_messages(first_example.inputs or {})
-            except InputFormatError as e:
+            except InputFormatError as err2:
                 msg = (
                     "Example inputs do not match language model input format. "
                     "Expected a dictionary with messages or a single prompt."
@@ -359,7 +359,7 @@ def _validate_example_inputs_for_language_model(
                     " to convert the example.inputs to a compatible format"
                    " for the llm or chat model you wish to evaluate."
                 )
-                raise InputFormatError(msg) from e
+                raise InputFormatError(msg) from err2
 
 
 def _validate_example_inputs_for_chain(
diff --git a/libs/langchain/langchain/tools/__init__.py b/libs/langchain/langchain/tools/__init__.py
index 428d1181805..0934636ac2b 100644
--- a/libs/langchain/langchain/tools/__init__.py
+++ b/libs/langchain/langchain/tools/__init__.py
@@ -78,8 +78,8 @@ def __getattr__(name: str) -> Any:
         f"`from langchain_community.tools import {name}`.\n\n"
         "To install langchain-community run "
         "`pip install -U langchain-community`.",
-        category=LangChainDeprecationWarning,
         stacklevel=2,
+        category=LangChainDeprecationWarning,
     )
     return getattr(tools, name)
 
diff --git a/libs/langchain/pyproject.toml b/libs/langchain/pyproject.toml
index c34f56ca08b..3f6f229e808 100644
--- a/libs/langchain/pyproject.toml
+++ b/libs/langchain/pyproject.toml
@@ -145,6 +145,7 @@ ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogy
 [tool.ruff.lint]
 select = [
     "A",      # flake8-builtins
+    "B",      # flake8-bugbear
     "ASYNC",  # flake8-async
     "B",      # flake8-bugbear
     "C4",     # flake8-comprehensions
@@ -195,6 +196,7 @@ ignore = [
     "S112", # Rarely useful
     "RUF012", # Doesn't play well with Pydantic
     "SLF001", # Private member access
+    "UP007", # pyupgrade: non-pep604-annotation-union
 ]
 
 pydocstyle.convention = "google"
diff --git a/libs/partners/anthropic/langchain_anthropic/chat_models.py b/libs/partners/anthropic/langchain_anthropic/chat_models.py
index cf6017f99b3..ea055d349ce 100644
--- a/libs/partners/anthropic/langchain_anthropic/chat_models.py
+++ b/libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -314,7 +314,7 @@ def _format_messages(
     system: Union[str, list[dict], None] = None
     formatted_messages: list[dict] = []
     merged_messages = _merge_messages(messages)
-    for i, message in enumerate(merged_messages):
+    for _i, message in enumerate(merged_messages):
         if message.type == "system":
             if system is not None:
                 msg = "Received multiple non-consecutive system messages."
@@ -496,7 +496,7 @@ def _handle_anthropic_bad_request(e: anthropic.BadRequestError) -> None:
     """Handle Anthropic BadRequestError."""
     if ("messages: at least one message is required") in e.message:
         message = "Received only system message(s). "
-        warnings.warn(message)
+        warnings.warn(message, stacklevel=2)
         raise e
     raise
 
@@ -1558,7 +1558,7 @@ class ChatAnthropic(BaseChatModel):
                 "generated. Consider disabling `thinking` or adjust your prompt to ensure "
                 "the tool is called."
             )
-            warnings.warn(thinking_admonition)
+            warnings.warn(thinking_admonition, stacklevel=2)
             llm = self.bind_tools(
                 [schema],
                 ls_structured_output_format={
@@ -2120,7 +2120,7 @@ def _make_message_chunk_from_anthropic_event(
             )
         ):
             if coerce_content_to_string:
-                warnings.warn("Received unexpected tool content block.")
+                warnings.warn("Received unexpected tool content block.", stacklevel=2)
             content_block = event.content_block.model_dump()
             content_block["index"] = event.index
             if event.content_block.type == "tool_use":
diff --git a/libs/partners/anthropic/langchain_anthropic/llms.py b/libs/partners/anthropic/langchain_anthropic/llms.py
index 772a1ea1a8c..0ad6d247fed 100644
--- a/libs/partners/anthropic/langchain_anthropic/llms.py
+++ b/libs/partners/anthropic/langchain_anthropic/llms.py
@@ -170,6 +170,7 @@ class AnthropicLLM(LLM, _AnthropicCommon):
             "This Anthropic LLM is deprecated. "
             "Please use `from langchain_anthropic import ChatAnthropic` "
             "instead",
+            stacklevel=2,
         )
         return values
 
diff --git a/libs/partners/anthropic/pyproject.toml b/libs/partners/anthropic/pyproject.toml
index 7d0388a6959..15dd8409236 100644
--- a/libs/partners/anthropic/pyproject.toml
+++ b/libs/partners/anthropic/pyproject.toml
@@ -62,6 +62,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A",      # flake8-builtins
+    "B",      # flake8-bugbear
     "ASYNC",  # flake8-async
     "C4",     # flake8-comprehensions
     "COM",    # flake8-commas
diff --git a/libs/partners/anthropic/tests/integration_tests/test_chat_models.py b/libs/partners/anthropic/tests/integration_tests/test_chat_models.py
index d5ea341cb5a..1bfad6214e0 100644
--- a/libs/partners/anthropic/tests/integration_tests/test_chat_models.py
+++ b/libs/partners/anthropic/tests/integration_tests/test_chat_models.py
@@ -1210,6 +1210,5 @@ def test_search_result_top_level() -> None:
 
 def test_async_shared_client() -> None:
     llm = ChatAnthropic(model="claude-3-5-haiku-latest")
-    llm._async_client  # Instantiates lazily
     _ = asyncio.run(llm.ainvoke("Hello"))
     _ = asyncio.run(llm.ainvoke("Hello"))
diff --git a/libs/partners/chroma/langchain_chroma/vectorstores.py b/libs/partners/chroma/langchain_chroma/vectorstores.py
index 2b966f662f2..9035fe74afb 100644
--- a/libs/partners/chroma/langchain_chroma/vectorstores.py
+++ b/libs/partners/chroma/langchain_chroma/vectorstores.py
@@ -479,7 +479,7 @@ class Chroma(VectorStore):
                    "Try filtering complex metadata using "
                    "langchain_community.vectorstores.utils.filter_complex_metadata."
                 )
-                raise ValueError(e.args[0] + "\n\n" + msg)
+                raise ValueError(e.args[0] + "\n\n" + msg) from e
             raise e
         if empty_ids:
             images_without_metadatas = [b64_texts[j] for j in empty_ids]
@@ -566,7 +566,7 @@ class Chroma(VectorStore):
                    "Try filtering complex metadata from the document using "
                    "langchain_community.vectorstores.utils.filter_complex_metadata."
                 )
-                raise ValueError(e.args[0] + "\n\n" + msg)
+                raise ValueError(e.args[0] + "\n\n" + msg) from e
             raise e
         if empty_ids:
             texts_without_metadatas = [texts[j] for j in empty_ids]
diff --git a/libs/partners/chroma/pyproject.toml b/libs/partners/chroma/pyproject.toml
index dc1d6a6d5e1..c6e82e40ad5 100644
--- a/libs/partners/chroma/pyproject.toml
+++ b/libs/partners/chroma/pyproject.toml
@@ -64,6 +64,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A",      # flake8-builtins
+    "B",      # flake8-bugbear
     "ASYNC",  # flake8-async
     "C4",     # flake8-comprehensions
     "COM",    # flake8-commas
diff --git a/libs/partners/chroma/tests/integration_tests/test_vectorstores.py b/libs/partners/chroma/tests/integration_tests/test_vectorstores.py
index f5794c3875c..99943c2f5a3 100644
--- a/libs/partners/chroma/tests/integration_tests/test_vectorstores.py
+++ b/libs/partners/chroma/tests/integration_tests/test_vectorstores.py
@@ -787,7 +787,7 @@ def test_collection_none_after_delete(
     _ = vectorstore._collection
     with pytest.raises(Exception, match="does not exist"):
         vectorstore._client.get_collection("test_collection")
-    with pytest.raises(Exception):
+    with pytest.raises(Exception):  # noqa: B017
         vectorstore.similarity_search("foo")
 
 
diff --git a/libs/partners/groq/langchain_groq/chat_models.py b/libs/partners/groq/langchain_groq/chat_models.py
index c374dea3cce..b4541a648ff 100644
--- a/libs/partners/groq/langchain_groq/chat_models.py
+++ b/libs/partners/groq/langchain_groq/chat_models.py
@@ -418,7 +418,8 @@ class ChatGroq(BaseChatModel):
                 warnings.warn(
                     f"""WARNING! {field_name} is not default parameter.
                     {field_name} was transferred to model_kwargs.
-                    Please confirm that {field_name} is what you intended."""
+                    Please confirm that {field_name} is what you intended.""",
+                    stacklevel=2,
                 )
                 extra[field_name] = values.pop(field_name)
 
diff --git a/libs/partners/groq/pyproject.toml b/libs/partners/groq/pyproject.toml
index 3fd3d38c03a..3a75d8f66d4 100644
--- a/libs/partners/groq/pyproject.toml
+++ b/libs/partners/groq/pyproject.toml
@@ -46,6 +46,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A",      # flake8-builtins
+    "B",      # flake8-bugbear
     "ASYNC",  # flake8-async
     "C4",     # flake8-comprehensions
     "COM",    # flake8-commas
diff --git a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py
index d6c21019198..7cf12b8c65e 100644
--- a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py
+++ b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py
@@ -98,12 +98,12 @@ class HuggingFaceEndpointEmbeddings(BaseModel, Embeddings):
 
             self.client = client
             self.async_client = async_client
-        except ImportError:
+        except ImportError as e:
             msg = (
                 "Could not import huggingface_hub python package. "
                 "Please install it with `pip install huggingface_hub`."
             )
-            raise ImportError(msg)
+            raise ImportError(msg) from e
         return self
 
     def embed_documents(self, texts: list[str]) -> list[list[float]]:
diff --git a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py
index c9b17712a97..7a53427520c 100644
--- a/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py
+++ b/libs/partners/huggingface/langchain_huggingface/llms/huggingface_pipeline.py
@@ -115,12 +115,12 @@ class HuggingFacePipeline(BaseLLM):
             )
             from transformers import pipeline as hf_pipeline  # type: ignore[import]
 
-        except ImportError:
+        except ImportError as e:
             msg = (
                 "Could not import transformers python package. "
                 "Please install it with `pip install transformers`."
             )
-            raise ValueError(msg)
+            raise ValueError(msg) from e
 
         _model_kwargs = model_kwargs.copy() if model_kwargs else {}
         if device_map is not None:
diff --git a/libs/partners/huggingface/pyproject.toml b/libs/partners/huggingface/pyproject.toml
index 7066f02b424..975ae5fa4e4 100644
--- a/libs/partners/huggingface/pyproject.toml
+++ b/libs/partners/huggingface/pyproject.toml
@@ -60,6 +60,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A",      # flake8-builtins
+    "B",      # flake8-bugbear
     "ASYNC",  # flake8-async
     "C4",     # flake8-comprehensions
     "COM",    # flake8-commas
@@ -110,7 +111,6 @@ ignore = [
     "RUF012", # Doesn't play well with Pydantic
     "SLF001", # Private member access
     "UP007", # pyupgrade: non-pep604-annotation-union
-
 ]
 
 [tool.coverage.run]
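Several hunks add explicit exception chaining when re-raising inside an `except` block (`from err` or `from None`), which is what bugbear's B904 rule checks: integration.py, self_query/base.py, runner_utils.py, the Chroma vectorstore, and the Hugging Face wrappers. A short, self-contained sketch of the two forms; the function names and the placeholder module below are illustrative, not taken from the patch:

```python
def load_optional_backend() -> None:
    """Re-raise with `from err` so the original error stays attached as __cause__."""
    try:
        import nonexistent_backend  # noqa: F401  # stand-in for an optional dependency
    except ImportError as err:
        msg = (
            "The optional backend is not installed. "
            "Install it to use this feature."
        )
        raise ImportError(msg) from err  # B904-compliant: the root cause stays in the traceback


def exit_with_clean_message(value: str) -> int:
    """Use `from None` when the inner traceback would only add noise."""
    try:
        return int(value)
    except ValueError:
        # Mirrors the `raise typer.Exit(code=1) from None` change in integration.py.
        raise SystemExit("expected an integer") from None


if __name__ == "__main__":
    print(exit_with_clean_message("42"))  # 42
```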
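The largest group of changes passes an explicit `stacklevel=2` to `warnings.warn` so the warning is attributed to the caller; B028 is the bugbear rule that flags `warn()` calls with no explicit stacklevel, and the patch also normalizes the previously inconsistent values of 3, 4, and 5 down to 2. A minimal sketch of what the argument does, assuming a made-up `deprecated_helper` function:

```python
import warnings


def deprecated_helper() -> None:
    """Emit a DeprecationWarning attributed to the caller's line, not to this module."""
    warnings.warn(
        "deprecated_helper is deprecated; call new_helper instead.",
        DeprecationWarning,
        stacklevel=2,  # 1 would point at this warn() call; 2 points at whoever called us
    )


def new_helper() -> None:
    """Replacement API; a no-op in this sketch."""


if __name__ == "__main__":
    warnings.simplefilter("always")
    deprecated_helper()  # the warning's reported filename and line number is this call site
```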