Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-11 15:35:09 +00:00)

Commit ae210c1590 (parent 5b3e29f809)

ruff: add bugbear across packages (#31917)

WIP; the remaining packages will follow in subsequent PRs.
@@ -134,19 +134,22 @@ def add(
         typer.Argument(help="The dependency to add"),
     ] = None,
     *,
-    api_path: Annotated[list[str], typer.Option(help="API paths to add")] = [],
+    api_path: Annotated[
+        Optional[list[str]],
+        typer.Option(help="API paths to add"),
+    ] = None,
     project_dir: Annotated[
         Optional[Path],
         typer.Option(help="The project directory"),
     ] = None,
     repo: Annotated[
-        list[str],
+        Optional[list[str]],
         typer.Option(help="Install templates from a specific github repo instead"),
-    ] = [],
+    ] = None,
     branch: Annotated[
-        list[str],
+        Optional[list[str]],
         typer.Option(help="Install templates from a specific branch"),
-    ] = [],
+    ] = None,
     pip: Annotated[
         bool,
         typer.Option(
@@ -163,6 +166,12 @@ def add(
     langchain app add extraction-openai-functions
     langchain app add git+ssh://git@github.com/efriis/simple-pirate.git
     """
+    if branch is None:
+        branch = []
+    if repo is None:
+        repo = []
+    if api_path is None:
+        api_path = []
     if not branch and not repo:
         warnings.warn(
             "Adding templates from the default branch and repo is deprecated."
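The two hunks above replace mutable list defaults (`= []`) on the Typer parameters with `Optional[list[str]] = None` and normalize back to empty lists inside the function body. This is the usual fix for flake8-bugbear's mutable-argument-default warning (B006): a list default is created once at definition time and shared across calls. A minimal sketch of the hazard and the fix, with hypothetical function names (not from the repository):

from typing import Optional


def add_item_bad(item: int, bucket: list[int] = []) -> list[int]:
    # B006: the same list object is reused for every call that omits `bucket`,
    # so items leak between calls.
    bucket.append(item)
    return bucket


def add_item_good(item: int, bucket: Optional[list[int]] = None) -> list[int]:
    # Fix: default to None and create a fresh list per call.
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket


print(add_item_bad(1), add_item_bad(2))    # [1, 2] [1, 2]  - shared state
print(add_item_good(1), add_item_good(2))  # [1] [2]        - independent lists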
@@ -90,7 +90,7 @@ def new(
         replacements = _process_name(name)
     except ValueError as e:
         typer.echo(e)
-        raise typer.Exit(code=1)
+        raise typer.Exit(code=1) from None

     if name_class:
         if not re.match(r"^[A-Z][a-zA-Z0-9]*$", name_class):
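Bugbear's B904 requires a `raise` inside an `except` block to state its cause: `from err` to chain the original traceback, or `from None` to suppress it deliberately, as the hunk above does after the error has already been echoed to the user. An illustrative sketch (names are made up):

def parse_port(value: str) -> int:
    try:
        return int(value)
    except ValueError as err:
        # B904: re-raising without `from` hides the causal relationship.
        # `from err` attaches the original exception as __cause__;
        # `from None` explicitly suppresses the chain instead.
        raise RuntimeError(f"invalid port: {value!r}") from err


try:
    parse_port("http")
except RuntimeError as exc:
    print(exc)            # invalid port: 'http'
    print(exc.__cause__)  # the original ValueError raised by int()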
@@ -65,7 +65,7 @@ def parse_dependency_string(
     else:
         _, post_slash = find_slash.split("/", 1)
         if "@" in post_slash or "#" in post_slash:
-            _, ref = re.split(r"[@#]", post_slash, 1)
+            _, ref = re.split(r"[@#]", post_slash, maxsplit=1)

     # gitstring is everything before that
     gitstring = gitstring[: -len(ref) - 1] if ref is not None else gitstring
@@ -159,7 +159,7 @@ def _get_repo_path(gitstring: str, ref: Optional[str], repo_dir: Path) -> Path:
     hashed = hashlib.sha256((f"{gitstring}:{ref_str}").encode()).hexdigest()[:8]

     removed_protocol = gitstring.split("://")[-1]
-    removed_basename = re.split(r"[/:]", removed_protocol, 1)[-1]
+    removed_basename = re.split(r"[/:]", removed_protocol, maxsplit=1)[-1]
     removed_extras = removed_basename.split("#")[0]
     foldername = re.sub(r"\W", "_", removed_extras)

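The two `re.split(..., 1)` calls above become `re.split(..., maxsplit=1)`. Bugbear's B034 asks for `maxsplit`, `count`, and `flags` to be passed by keyword to `re.split`/`re.sub`, since a bare positional number is easy to misread as a flags value (and newer Python versions deprecate the positional form). A small sketch with a made-up dependency string:

import re

dep = "git+https://github.com/owner/repo@main"

# Harder to read: is 1 a maxsplit or a flags argument?
head, tail = re.split(r"[@#]", dep, 1)

# Same behavior, explicit keyword, satisfies B034.
head, tail = re.split(r"[@#]", dep, maxsplit=1)

print(head)  # git+https://github.com/owner/repo
print(tail)  # main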
@@ -50,6 +50,7 @@ exclude = [
 [tool.ruff.lint]
 select = [
     "A", # flake8-builtins
+    "B", # flake8-bugbear
     "ARG", # flake8-unused-arguments
     "ASYNC", # flake8-async
     "C4", # flake8-comprehensions
@@ -96,6 +97,7 @@ ignore = [
     "D105", # pydocstyle: Missing docstring in magic method
     "D107", # pydocstyle: Missing docstring in __init__
     "D407", # pydocstyle: Missing-dashed-underline-after-section
+    "COM812", # Messes with the formatter
 ]
 pyupgrade.keep-runtime-typing = true

@@ -29,12 +29,12 @@ def _warn_on_import(name: str, replacement: Optional[str] = None) -> None:
         warnings.warn(
             f"Importing {name} from langchain root module is no longer supported. "
             f"Please use {replacement} instead.",
-            stacklevel=3,
+            stacklevel=2,
         )
     else:
         warnings.warn(
             f"Importing {name} from langchain root module is no longer supported.",
-            stacklevel=3,
+            stacklevel=2,
         )


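Many of the remaining hunks either add `stacklevel=2` to `warnings.warn(...)` calls that had none (bugbear B028) or normalize hand-picked values like 3, 4, or 5 down to 2, so the warning is attributed to the immediate caller rather than to a deeper frame. Roughly how stacklevel shifts the reported location (illustrative sketch only):

import warnings


def deprecated_helper() -> None:
    # stacklevel=1 (the default) would blame this line inside the library.
    # stacklevel=2 attributes the warning to whoever called deprecated_helper(),
    # which is the line a user actually needs to change.
    warnings.warn("deprecated_helper is deprecated", DeprecationWarning, stacklevel=2)


def user_code() -> None:
    deprecated_helper()  # the warning is reported against this line


if __name__ == "__main__":
    warnings.simplefilter("always")
    user_code()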
@@ -255,7 +255,7 @@ class Chain(RunnableSerializable[dict[str, Any], dict[str, Any]], ABC):
             warnings.warn(
                 "callback_manager is deprecated. Please use callbacks instead.",
                 DeprecationWarning,
-                stacklevel=4,
+                stacklevel=2,
             )
             values["callbacks"] = values.pop("callback_manager", None)
         return values
@@ -504,7 +504,7 @@ class ChatVectorDBChain(BaseConversationalRetrievalChain):
         warnings.warn(
             "`ChatVectorDBChain` is deprecated - "
             "please use `from langchain.chains import ConversationalRetrievalChain`",
-            stacklevel=4,
+            stacklevel=2,
         )
         return values

@@ -112,7 +112,7 @@ class LLMCheckerChain(Chain):
                 "Directly instantiating an LLMCheckerChain with an llm is deprecated. "
                 "Please instantiate with question_to_checked_assertions_chain "
                 "or using the from_llm class method.",
-                stacklevel=5,
+                stacklevel=2,
             )
         if (
             "question_to_checked_assertions_chain" not in values
@@ -177,7 +177,7 @@ class LLMMathChain(Chain):
                 "Directly instantiating an LLMMathChain with an llm is deprecated. "
                 "Please instantiate with llm_chain argument or using the from_llm "
                 "class method.",
-                stacklevel=5,
+                stacklevel=2,
             )
         if "llm_chain" not in values and values["llm"] is not None:
             prompt = values.get("prompt", PROMPT)
@@ -118,7 +118,7 @@ class LLMSummarizationCheckerChain(Chain):
                 "Directly instantiating an LLMSummarizationCheckerChain with an llm is "
                 "deprecated. Please instantiate with"
                 " sequential_chain argument or using the from_llm class method.",
-                stacklevel=5,
+                stacklevel=2,
             )
         if "sequential_chain" not in values and values["llm"] is not None:
             values["sequential_chain"] = _load_sequential_chain(
@@ -74,7 +74,7 @@ class NatBotChain(Chain):
                 "Directly instantiating an NatBotChain with an llm is deprecated. "
                 "Please instantiate with llm_chain argument or using the from_llm "
                 "class method.",
-                stacklevel=5,
+                stacklevel=2,
             )
         if "llm_chain" not in values and values["llm"] is not None:
             values["llm_chain"] = PROMPT | values["llm"] | StrOutputParser()
@@ -77,7 +77,7 @@ class VectorDBQAWithSourcesChain(BaseQAWithSourcesChain):
         warnings.warn(
             "`VectorDBQAWithSourcesChain` is deprecated - "
             "please use `from langchain.chains import RetrievalQAWithSourcesChain`",
-            stacklevel=5,
+            stacklevel=2,
         )
         return values

@@ -162,7 +162,7 @@ class QueryTransformer(Transformer):
             warnings.warn(
                 "Dates are expected to be provided in ISO 8601 date format "
                 "(YYYY-MM-DD).",
-                stacklevel=3,
+                stacklevel=2,
             )
         return {"date": item, "type": "date"}

@@ -124,12 +124,12 @@ class _EvalArgsMixin:
             msg = f"{self.__class__.__name__} requires an input string."
             raise ValueError(msg)
         if input_ is not None and not self.requires_input:
-            warn(self._skip_input_warning, stacklevel=3)
+            warn(self._skip_input_warning, stacklevel=2)
         if self.requires_reference and reference is None:
             msg = f"{self.__class__.__name__} requires a reference string."
             raise ValueError(msg)
         if reference is not None and not self.requires_reference:
-            warn(self._skip_reference_warning, stacklevel=3)
+            warn(self._skip_reference_warning, stacklevel=2)


 class StringEvaluator(_EvalArgsMixin, ABC):
@@ -183,7 +183,7 @@ def _get_in_memory_vectorstore() -> type[VectorStore]:
         "Using InMemoryVectorStore as the default vectorstore."
         "This memory store won't persist data. You should explicitly"
         "specify a vectorstore when using VectorstoreIndexCreator",
-        stacklevel=3,
+        stacklevel=2,
     )
     return InMemoryVectorStore

@@ -58,7 +58,7 @@ class BaseChatMemory(BaseMemory, ABC):
                 f"'{self.__class__.__name__}' got multiple output keys:"
                 f" {outputs.keys()}. The default 'output' key is being used."
                 f" If this is not desired, please manually set 'output_key'.",
-                stacklevel=3,
+                stacklevel=2,
             )
         else:
             msg = (
@@ -42,7 +42,7 @@ class CombinedMemory(BaseMemory):
                     "When using CombinedMemory, "
                     "input keys should be so the input is known. "
                     f" Was not set on {val}",
-                    stacklevel=5,
+                    stacklevel=2,
                 )
         return value

@@ -27,12 +27,12 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
     """Get the translator class corresponding to the vector store class."""
     try:
         import langchain_community  # noqa: F401
-    except ImportError as e:
+    except ImportError as err:
         msg = (
             "The langchain-community package must be installed to use this feature."
             " Please install it using `pip install langchain-community`."
         )
-        raise ImportError(msg) from e
+        raise ImportError(msg) from err

     from langchain_community.query_constructors.astradb import AstraDBTranslator
     from langchain_community.query_constructors.chroma import ChromaTranslator
@@ -350,7 +350,7 @@ def _validate_example_inputs_for_language_model(
     except InputFormatError:
         try:
             _get_messages(first_example.inputs or {})
-        except InputFormatError as e:
+        except InputFormatError as err2:
             msg = (
                 "Example inputs do not match language model input format. "
                 "Expected a dictionary with messages or a single prompt."
@@ -359,7 +359,7 @@ def _validate_example_inputs_for_language_model(
                 " to convert the example.inputs to a compatible format"
                 " for the llm or chat model you wish to evaluate."
             )
-            raise InputFormatError(msg) from e
+            raise InputFormatError(msg) from err2


 def _validate_example_inputs_for_chain(
@@ -78,8 +78,8 @@ def __getattr__(name: str) -> Any:
             f"`from langchain_community.tools import {name}`.\n\n"
             "To install langchain-community run "
             "`pip install -U langchain-community`.",
-            category=LangChainDeprecationWarning,
             stacklevel=2,
+            category=LangChainDeprecationWarning,
         )

         return getattr(tools, name)
@@ -145,6 +145,7 @@ ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogy
 [tool.ruff.lint]
 select = [
     "A", # flake8-builtins
+    "B", # flake8-bugbear
     "ASYNC", # flake8-async
     "B", # flake8-bugbear
     "C4", # flake8-comprehensions
@@ -195,6 +196,7 @@ ignore = [
     "S112", # Rarely useful
     "RUF012", # Doesn't play well with Pydantic
     "SLF001", # Private member access
+    "UP007", # pyupgrade: non-pep604-annotation-union
 ]

 pydocstyle.convention = "google"
@@ -314,7 +314,7 @@ def _format_messages(
     system: Union[str, list[dict], None] = None
     formatted_messages: list[dict] = []
     merged_messages = _merge_messages(messages)
-    for i, message in enumerate(merged_messages):
+    for _i, message in enumerate(merged_messages):
         if message.type == "system":
             if system is not None:
                 msg = "Received multiple non-consecutive system messages."
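Renaming `i` to `_i` addresses bugbear B007 (unused loop control variable): the leading underscore marks the index as intentionally unused while keeping `enumerate` in place. A tiny sketch with made-up data:

messages = ["system prompt", "hello", "hi there"]

# B007 flags `i` because the loop body never uses it:
# for i, message in enumerate(messages):
#     print(message)

# Underscore prefix signals the intent (an alternative is simply
# `for message in messages:` when the index is never needed).
for _i, message in enumerate(messages):
    print(message)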
@@ -496,7 +496,7 @@ def _handle_anthropic_bad_request(e: anthropic.BadRequestError) -> None:
     """Handle Anthropic BadRequestError."""
     if ("messages: at least one message is required") in e.message:
         message = "Received only system message(s). "
-        warnings.warn(message)
+        warnings.warn(message, stacklevel=2)
         raise e
     raise

@@ -1558,7 +1558,7 @@ class ChatAnthropic(BaseChatModel):
                 "generated. Consider disabling `thinking` or adjust your prompt to ensure "
                 "the tool is called."
             )
-            warnings.warn(thinking_admonition)
+            warnings.warn(thinking_admonition, stacklevel=2)
             llm = self.bind_tools(
                 [schema],
                 ls_structured_output_format={
@@ -2120,7 +2120,7 @@ def _make_message_chunk_from_anthropic_event(
             )
         ):
             if coerce_content_to_string:
-                warnings.warn("Received unexpected tool content block.")
+                warnings.warn("Received unexpected tool content block.", stacklevel=2)
             content_block = event.content_block.model_dump()
             content_block["index"] = event.index
             if event.content_block.type == "tool_use":
@@ -170,6 +170,7 @@ class AnthropicLLM(LLM, _AnthropicCommon):
             "This Anthropic LLM is deprecated. "
             "Please use `from langchain_anthropic import ChatAnthropic` "
             "instead",
+            stacklevel=2,
         )
         return values

@@ -62,6 +62,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A", # flake8-builtins
+    "B", # flake8-bugbear
     "ASYNC", # flake8-async
     "C4", # flake8-comprehensions
     "COM", # flake8-commas
@@ -1210,6 +1210,5 @@ def test_search_result_top_level() -> None:

 def test_async_shared_client() -> None:
     llm = ChatAnthropic(model="claude-3-5-haiku-latest")
-    llm._async_client  # Instantiates lazily
     _ = asyncio.run(llm.ainvoke("Hello"))
     _ = asyncio.run(llm.ainvoke("Hello"))
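The deleted line `llm._async_client  # Instantiates lazily` is a bare attribute access whose value is discarded, which bugbear flags as a useless expression (B018); when the evaluation itself is the point, assigning to `_` (as the surrounding test already does with `asyncio.run(...)`) makes the intent explicit. A sketch with a toy class (not the real ChatAnthropic API):

class Client:
    @property
    def async_client(self) -> str:
        # pretend this lazily builds an expensive connection
        return "connected"


c = Client()

# B018: a statement that is just an attribute access looks like a mistake.
# c.async_client

# If triggering the lazy initialization is intentional, say so explicitly:
_ = c.async_client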
@@ -479,7 +479,7 @@ class Chroma(VectorStore):
                     "Try filtering complex metadata using "
                     "langchain_community.vectorstores.utils.filter_complex_metadata."
                 )
-                raise ValueError(e.args[0] + "\n\n" + msg)
+                raise ValueError(e.args[0] + "\n\n" + msg) from e
             raise e
         if empty_ids:
             images_without_metadatas = [b64_texts[j] for j in empty_ids]
@@ -566,7 +566,7 @@ class Chroma(VectorStore):
                     "Try filtering complex metadata from the document using "
                     "langchain_community.vectorstores.utils.filter_complex_metadata."
                 )
-                raise ValueError(e.args[0] + "\n\n" + msg)
+                raise ValueError(e.args[0] + "\n\n" + msg) from e
             raise e
         if empty_ids:
             texts_without_metadatas = [texts[j] for j in empty_ids]
@@ -64,6 +64,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A", # flake8-builtins
+    "B", # flake8-bugbear
     "ASYNC", # flake8-async
     "C4", # flake8-comprehensions
     "COM", # flake8-commas
@@ -787,7 +787,7 @@ def test_collection_none_after_delete(
     _ = vectorstore._collection
     with pytest.raises(Exception, match="does not exist"):
         vectorstore._client.get_collection("test_collection")
-    with pytest.raises(Exception):
+    with pytest.raises(Exception):  # noqa: B017
         vectorstore.similarity_search("foo")


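`pytest.raises(Exception)` with no narrower type is flagged by B017 (assert-raises-exception) because it passes for any error at all; the hunk above keeps the broad assertion and documents the choice with `# noqa: B017`, while the preceding assertion shows the preferred shape: a specific type and match pattern. A tiny illustration with a toy function and made-up message:

import pytest


def delete_collection() -> None:
    raise RuntimeError("Collection test_collection does not exist.")


def test_delete_collection() -> None:
    # Preferred: pin down the exception type and/or message.
    with pytest.raises(RuntimeError, match="does not exist"):
        delete_collection()

    # Too broad: passes for *any* exception, so B017 flags it. When the
    # code under test really can raise arbitrary errors, a targeted
    # `# noqa: B017` (as in the hunk above) records that decision.
    with pytest.raises(Exception):  # noqa: B017
        delete_collection()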
@@ -418,7 +418,8 @@ class ChatGroq(BaseChatModel):
                 warnings.warn(
                     f"""WARNING! {field_name} is not default parameter.
                     {field_name} was transferred to model_kwargs.
-                    Please confirm that {field_name} is what you intended."""
+                    Please confirm that {field_name} is what you intended.""",
+                    stacklevel=2,
                 )
                 extra[field_name] = values.pop(field_name)

@@ -46,6 +46,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A", # flake8-builtins
+    "B", # flake8-bugbear
     "ASYNC", # flake8-async
     "C4", # flake8-comprehensions
     "COM", # flake8-commas
@@ -98,12 +98,12 @@ class HuggingFaceEndpointEmbeddings(BaseModel, Embeddings):
             self.client = client
             self.async_client = async_client

-        except ImportError:
+        except ImportError as e:
             msg = (
                 "Could not import huggingface_hub python package. "
                 "Please install it with `pip install huggingface_hub`."
             )
-            raise ImportError(msg)
+            raise ImportError(msg) from e
         return self

     def embed_documents(self, texts: list[str]) -> list[list[float]]:
@@ -115,12 +115,12 @@ class HuggingFacePipeline(BaseLLM):
             )
             from transformers import pipeline as hf_pipeline  # type: ignore[import]

-        except ImportError:
+        except ImportError as e:
             msg = (
                 "Could not import transformers python package. "
                 "Please install it with `pip install transformers`."
             )
-            raise ValueError(msg)
+            raise ValueError(msg) from e

         _model_kwargs = model_kwargs.copy() if model_kwargs else {}
         if device_map is not None:
@@ -60,6 +60,7 @@ target-version = "py39"
 [tool.ruff.lint]
 select = [
     "A", # flake8-builtins
+    "B", # flake8-bugbear
     "ASYNC", # flake8-async
     "C4", # flake8-comprehensions
     "COM", # flake8-commas
@@ -110,7 +111,6 @@ ignore = [
     "RUF012", # Doesn't play well with Pydantic
     "SLF001", # Private member access
     "UP007", # pyupgrade: non-pep604-annotation-union
-
 ]

 [tool.coverage.run]