ruff: more rules across the board & fixes (#31898)

* standardizes ruff dep version across all `pyproject.toml` files
* cli: ruff rules and corrections
* langchain: rules and corrections
Mason Daugherty
2025-07-07 17:48:01 -04:00
committed by GitHub
parent 706a66eccd
commit e7eac27241
408 changed files with 2783 additions and 1671 deletions

View File

@@ -11,7 +11,9 @@ from langchain_cli.utils.packages import get_langserve_export, get_package_root
app = typer.Typer(no_args_is_help=True, add_completion=False)
app.add_typer(
template_namespace.package_cli, name="template", help=template_namespace.__doc__
template_namespace.package_cli,
name="template",
help=template_namespace.__doc__,
)
app.add_typer(app_namespace.app_cli, name="app", help=app_namespace.__doc__)
app.add_typer(
@@ -32,7 +34,7 @@ app.command(
)
def version_callback(show_version: bool) -> None:
def version_callback(show_version: bool) -> None: # noqa: FBT001
if show_version:
typer.echo(f"langchain-cli {__version__}")
raise typer.Exit
@@ -40,8 +42,8 @@ def version_callback(show_version: bool) -> None:
@app.callback()
def main(
version: bool = typer.Option(
False,
version: bool = typer.Option( # noqa: FBT001
False, # noqa: FBT003
"--version",
"-v",
help="Print the current CLI version.",
@@ -56,10 +58,12 @@ def main(
def serve(
*,
port: Annotated[
Optional[int], typer.Option(help="The port to run the server on")
Optional[int],
typer.Option(help="The port to run the server on"),
] = None,
host: Annotated[
Optional[str], typer.Option(help="The host to run the server on")
Optional[str],
typer.Option(help="The host to run the server on"),
] = None,
) -> None:
"""Start the LangServe app, whether it's a template or an app."""

View File

@@ -1,7 +1,7 @@
# type: ignore
"""Development Scripts for template packages."""
from collections.abc import Sequence
from typing import Literal
from fastapi import FastAPI
from langserve import add_routes
@@ -12,9 +12,9 @@ from langchain_cli.utils.packages import get_langserve_export, get_package_root
def create_demo_server(
*,
config_keys: Sequence[str] = (),
playground_type: str = "default",
playground_type: Literal["default", "chat"] = "default",
):
"""Creates a demo server for the current template."""
"""Create a demo server for the current template."""
app = FastAPI()
package_root = get_package_root()
pyproject = package_root / "pyproject.toml"

View File

@@ -39,7 +39,7 @@ lint lint_diff lint_package lint_tests:
format format_diff:
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --select I --fix $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --fix $(PYTHON_FILES)
spell_check:
poetry run codespell --toml pyproject.toml

View File

@@ -79,7 +79,9 @@ def new(
package_prompt = "What package would you like to add? (leave blank to skip)"
while True:
package_str = typer.prompt(
package_prompt, default="", show_default=False
package_prompt,
default="",
show_default=False,
)
if not package_str:
break
@@ -121,26 +123,29 @@ def new(
typer.echo("Then add templates with commands like:\n")
typer.echo(" langchain app add extraction-openai-functions")
typer.echo(
" langchain app add git+ssh://git@github.com/efriis/simple-pirate.git\n\n"
" langchain app add git+ssh://git@github.com/efriis/simple-pirate.git\n\n",
)
@app_cli.command()
def add(
dependencies: Annotated[
Optional[list[str]], typer.Argument(help="The dependency to add")
Optional[list[str]],
typer.Argument(help="The dependency to add"),
] = None,
*,
api_path: Annotated[list[str], typer.Option(help="API paths to add")] = [],
project_dir: Annotated[
Optional[Path], typer.Option(help="The project directory")
Optional[Path],
typer.Option(help="The project directory"),
] = None,
repo: Annotated[
list[str],
typer.Option(help="Install templates from a specific github repo instead"),
] = [],
branch: Annotated[
list[str], typer.Option(help="Install templates from a specific branch")
list[str],
typer.Option(help="Install templates from a specific branch"),
] = [],
pip: Annotated[
bool,
@@ -152,13 +157,12 @@ def add(
),
],
) -> None:
"""Adds the specified template to the current LangServe app.
"""Add the specified template to the current LangServe app.
e.g.:
langchain app add extraction-openai-functions
langchain app add git+ssh://git@github.com/efriis/simple-pirate.git
"""
if not branch and not repo:
warnings.warn(
"Adding templates from the default branch and repo is deprecated."
@@ -173,7 +177,7 @@ def add(
package_dir = project_root / "packages"
create_events(
[{"event": "serve add", "properties": {"parsed_dep": d}} for d in parsed_deps]
[{"event": "serve add", "properties": {"parsed_dep": d}} for d in parsed_deps],
)
# group by repo/ref
@@ -248,7 +252,7 @@ def add(
cmd = ["pip", "install", "-e", *installed_destination_strs]
cmd_str = " \\\n ".join(installed_destination_strs)
typer.echo(f"Running: pip install -e \\\n {cmd_str}")
subprocess.run(cmd, cwd=cwd)
subprocess.run(cmd, cwd=cwd) # noqa: S603
chain_names = []
for e in installed_exports:
@@ -296,10 +300,11 @@ def remove(
api_paths: Annotated[list[str], typer.Argument(help="The API paths to remove")],
*,
project_dir: Annotated[
Optional[Path], typer.Option(help="The project directory")
Optional[Path],
typer.Option(help="The project directory"),
] = None,
) -> None:
"""Removes the specified package from the current LangServe app."""
"""Remove the specified package from the current LangServe app."""
project_root = get_package_root(project_dir)
project_pyproject = project_root / "pyproject.toml"
@@ -320,7 +325,7 @@ def remove(
shutil.rmtree(package_dir)
remove_deps.append(api_path)
except Exception:
except Exception: # noqa: S110
pass
try:
@@ -334,16 +339,19 @@ def remove(
def serve(
*,
port: Annotated[
Optional[int], typer.Option(help="The port to run the server on")
Optional[int],
typer.Option(help="The port to run the server on"),
] = None,
host: Annotated[
Optional[str], typer.Option(help="The host to run the server on")
Optional[str],
typer.Option(help="The host to run the server on"),
] = None,
app: Annotated[
Optional[str], typer.Option(help="The app to run, e.g. `app.server:app`")
Optional[str],
typer.Option(help="The app to run, e.g. `app.server:app`"),
] = None,
) -> None:
"""Starts the LangServe app."""
"""Start the LangServe app."""
# add current dir as first entry of path
sys.path.append(str(Path.cwd()))
@@ -353,5 +361,8 @@ def serve(
import uvicorn
uvicorn.run(
app_str, host=host_str, port=port if port is not None else 8000, reload=True
app_str,
host=host_str,
port=port if port is not None else 8000,
reload=True,
)
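
`S603` here (and `S607` in later hunks) are flake8-bandit checks on `subprocess`: S603 flags `subprocess.run`/`Popen` calls whose input might be untrusted, and S607 flags launching an executable by partial path (e.g. `"pip"`, `"poetry"`) rather than an absolute one. A hedged sketch of the pattern being suppressed (the argv below is illustrative, not from this commit):

    import subprocess

    cmd = ["pip", "install", "-e", "./packages/my-template"]  # hypothetical argv

    # The CLI assembles argv as a list (no shell=True), so the commit keeps the
    # calls and silences the bandit-derived warnings rather than rewriting them:
    subprocess.run(cmd, cwd=".", check=False)  # noqa: S603, S607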

View File

@@ -66,7 +66,7 @@ def new(
Optional[str],
typer.Option(
help="The name of the integration in PascalCase. e.g. `MyIntegration`."
" This is used to name classes like `MyIntegrationVectorStore`"
" This is used to name classes like `MyIntegrationVectorStore`",
),
] = None,
src: Annotated[
@@ -85,7 +85,7 @@ def new(
),
] = None,
) -> None:
"""Creates a new integration package."""
"""Create a new integration package."""
try:
replacements = _process_name(name)
except ValueError as e:
@@ -96,13 +96,14 @@ def new(
if not re.match(r"^[A-Z][a-zA-Z0-9]*$", name_class):
typer.echo(
"Name should only contain letters (a-z, A-Z), numbers, and underscores"
", and start with a capital letter."
", and start with a capital letter.",
)
raise typer.Exit(code=1)
replacements["__ModuleName__"] = name_class
else:
replacements["__ModuleName__"] = typer.prompt(
"Name of integration in PascalCase", default=replacements["__ModuleName__"]
"Name of integration in PascalCase",
default=replacements["__ModuleName__"],
)
project_template_dir = Path(__file__).parents[1] / "integration_template"
@@ -124,7 +125,7 @@ def new(
# poetry install
subprocess.run(
["poetry", "install", "--with", "lint,test,typing,test_integration"],
["poetry", "install", "--with", "lint,test,typing,test_integration"], # noqa: S607
cwd=destination_dir,
)
else:
@@ -152,7 +153,7 @@ def new(
if len(dst_paths) != len(set(dst_paths)):
typer.echo(
"Duplicate destination paths provided or computed - please "
"specify them explicitly with --dst."
"specify them explicitly with --dst.",
)
raise typer.Exit(code=1)
@@ -224,10 +225,10 @@ def create_doc(
),
] = "docs/docs/integrations/chat/",
) -> None:
"""Creates a new integration doc."""
"""Create a new integration doc."""
if component_type not in TEMPLATE_MAP:
typer.echo(
f"Unrecognized {component_type=}. Expected one of {_component_types_str}."
f"Unrecognized {component_type=}. Expected one of {_component_types_str}.",
)
raise typer.Exit(code=1)

View File

@@ -6,14 +6,17 @@ import pkgutil
def generate_raw_migrations(
from_package: str, to_package: str, filter_by_all: bool = False
from_package: str,
to_package: str,
filter_by_all: bool = False, # noqa: FBT001, FBT002
) -> list[tuple[str, str]]:
"""Scan the `langchain` package and generate migrations for all modules."""
package = importlib.import_module(from_package)
items = []
for _importer, modname, _ispkg in pkgutil.walk_packages(
package.__path__, package.__name__ + "."
package.__path__,
package.__name__ + ".",
):
try:
module = importlib.import_module(modname)
@@ -34,33 +37,35 @@ def generate_raw_migrations(
obj = getattr(module, name, None)
except ImportError:
continue
if obj and (inspect.isclass(obj) or inspect.isfunction(obj)):
if obj.__module__.startswith(to_package):
items.append(
(f"{modname}.{name}", f"{obj.__module__}.{obj.__name__}")
)
if (
obj
and (inspect.isclass(obj) or inspect.isfunction(obj))
and obj.__module__.startswith(to_package)
):
items.append(
(f"{modname}.{name}", f"{obj.__module__}.{obj.__name__}"),
)
if not filter_by_all:
# Iterate over all members of the module
for name, obj in inspect.getmembers(module):
# Check if it's a class or function
if inspect.isclass(obj) or inspect.isfunction(obj):
# Check if the module name of the obj starts with
# 'langchain_community'
if obj.__module__.startswith(to_package):
items.append(
(f"{modname}.{name}", f"{obj.__module__}.{obj.__name__}")
)
# Check if the module name of the obj starts with
# 'langchain_community'
if inspect.isclass(obj) or (
inspect.isfunction(obj) and obj.__module__.startswith(to_package)
):
items.append(
(f"{modname}.{name}", f"{obj.__module__}.{obj.__name__}"),
)
return items
def generate_top_level_imports(pkg: str) -> list[tuple[str, str]]:
"""This code will look at all the top level modules in langchain_community.
"""Look at all the top level modules in langchain_community.
It'll attempt to import everything from each __init__ file
for example,
Attempt to import everything from each ``__init__`` file. For example,
langchain_community/
chat_models/
@@ -74,10 +79,10 @@ def generate_top_level_imports(pkg: str) -> list[tuple[str, str]]:
Each tuple will contain the fully qualified path of the class / function to where
its logic is defined
(e.g., langchain_community.chat_models.xyz_implementation.ver2.XYZ)
(e.g., ``langchain_community.chat_models.xyz_implementation.ver2.XYZ``)
and the second tuple will contain the path
to importing it from the top level namespaces
(e.g., langchain_community.chat_models.XYZ)
(e.g., ``langchain_community.chat_models.XYZ``)
"""
package = importlib.import_module(pkg)
@@ -98,7 +103,7 @@ def generate_top_level_imports(pkg: str) -> list[tuple[str, str]]:
top_level_import = f"{module_name}.{name}"
# Append the tuple with original and top-level paths
items.append(
(f"{original_module}.{original_name}", top_level_import)
(f"{original_module}.{original_name}", top_level_import),
)
# Handle the package itself (root level)
@@ -106,7 +111,8 @@ def generate_top_level_imports(pkg: str) -> list[tuple[str, str]]:
# Only iterate through top-level modules/packages
for _finder, modname, ispkg in pkgutil.iter_modules(
package.__path__, package.__name__ + "."
package.__path__,
package.__name__ + ".",
):
if ispkg:
try:
@@ -119,11 +125,15 @@ def generate_top_level_imports(pkg: str) -> list[tuple[str, str]]:
def generate_simplified_migrations(
from_package: str, to_package: str, filter_by_all: bool = True
from_package: str,
to_package: str,
filter_by_all: bool = True, # noqa: FBT001, FBT002
) -> list[tuple[str, str]]:
"""Get all the raw migrations, then simplify them if possible."""
raw_migrations = generate_raw_migrations(
from_package, to_package, filter_by_all=filter_by_all
from_package,
to_package,
filter_by_all=filter_by_all,
)
top_level_simplifications = generate_top_level_imports(to_package)
top_level_dict = dict(top_level_simplifications)

View File

@@ -17,7 +17,7 @@ def dump_migrations_as_grit(name: str, migration_pairs: list[tuple[str, str]]) -
]
"""
for from_module, to_module in migration_pairs
]
],
)
pattern_name = f"langchain_migrate_{name}"
return f"""

View File

@@ -28,6 +28,7 @@ def get_migrations_for_partner_package(pkg_name: str) -> list[tuple[str, str]]:
Returns:
List of 2-tuples containing old and new import paths.
"""
package = importlib.import_module(pkg_name)
classes_ = find_subclasses_in_module(

View File

@@ -20,7 +20,7 @@ class ImportExtractor(ast.NodeVisitor):
self.imports: list = []
self.package = from_package
def visit_ImportFrom(self, node) -> None:
def visit_ImportFrom(self, node) -> None: # noqa: N802
if node.module and (
self.package is None or str(node.module).startswith(self.package)
):
@@ -39,7 +39,7 @@ def _get_class_names(code: str) -> list[str]:
# Define a node visitor class to collect class names
class ClassVisitor(ast.NodeVisitor):
def visit_ClassDef(self, node) -> None:
def visit_ClassDef(self, node) -> None: # noqa: N802
class_names.append(node.name)
self.generic_visit(node)
@@ -79,7 +79,9 @@ def _get_all_classnames_from_file(file: Path, pkg: str) -> list[tuple[str, str]]
def identify_all_imports_in_file(
file: str, *, from_package: Optional[str] = None
file: str,
*,
from_package: Optional[str] = None,
) -> list[tuple[str, str]]:
"""Let's also identify all the imports in the given file."""
with open(file, encoding="utf-8") as f:
@@ -96,10 +98,13 @@ def identify_pkg_source(pkg_root: str) -> pathlib.Path:
Returns:
Returns the path to the source code for the package.
"""
dirs = [d for d in Path(pkg_root).iterdir() if d.is_dir()]
matching_dirs = [d for d in dirs if d.name.startswith("langchain_")]
assert len(matching_dirs) == 1, "There should be only one langchain package."
if len(matching_dirs) != 1:
msg = "There should be only one langchain package."
raise ValueError(msg)
return matching_dirs[0]
@@ -134,7 +139,9 @@ def list_init_imports_by_package(pkg_root: str) -> list[tuple[str, str]]:
def find_imports_from_package(
code: str, *, from_package: Optional[str] = None
code: str,
*,
from_package: Optional[str] = None,
) -> list[tuple[str, str]]:
# Parse the code into an AST
tree = ast.parse(code)
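
The `N802` suppressions above exist because `ast.NodeVisitor` dispatches to methods named `visit_<NodeClassName>`, so the camel case in `visit_ImportFrom` / `visit_ClassDef` is dictated by the stdlib API rather than by style. A small self-contained sketch:

    import ast

    class ImportCollector(ast.NodeVisitor):
        def __init__(self) -> None:
            self.modules: list[str] = []

        # pep8-naming (N802) wants lowercase method names, but the visitor API
        # requires the AST node class name verbatim, hence the suppression.
        def visit_ImportFrom(self, node: ast.ImportFrom) -> None:  # noqa: N802
            if node.module:
                self.modules.append(node.module)
            self.generic_visit(node)

    collector = ImportCollector()
    collector.visit(ast.parse("from pathlib import Path"))
    print(collector.modules)  # ['pathlib']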

View File

@@ -4,7 +4,7 @@ from pathlib import Path
import rich
import typer
from gritql import run # type: ignore
from gritql import run # type: ignore[import]
from typer import Option
@@ -17,13 +17,13 @@ def get_gritdir_path() -> Path:
def migrate(
ctx: typer.Context,
# Using diff instead of dry-run for backwards compatibility with the old CLI
diff: bool = Option(
False,
diff: bool = Option( # noqa: FBT001
False, # noqa: FBT003
"--diff",
help="Show the changes that would be made without applying them.",
),
interactive: bool = Option(
False,
interactive: bool = Option( # noqa: FBT001
False, # noqa: FBT003
"--interactive",
help="Prompt for confirmation before making each change",
),
@@ -54,7 +54,7 @@ def migrate(
'⚠️ This script is a "best-effort", and is likely to make some '
"mistakes.\n\n"
"🛡️ Backup your code prior to running the migration script -- it will "
"modify your files!\n\n"
"modify your files!\n\n",
)
rich.print("-" * 10)
rich.print()

View File

@@ -16,12 +16,12 @@ package_cli = typer.Typer(no_args_is_help=True, add_completion=False)
@package_cli.command()
def new(
name: Annotated[str, typer.Argument(help="The name of the folder to create")],
with_poetry: Annotated[
with_poetry: Annotated[ # noqa: FBT002
bool,
typer.Option("--with-poetry/--no-poetry", help="Don't run poetry install"),
] = False,
) -> None:
"""Creates a new template package."""
"""Create a new template package."""
computed_name = name if name != "." else Path.cwd().name
destination_dir = Path.cwd() / name if name != "." else Path.cwd()
@@ -53,8 +53,9 @@ def new(
pyproject_contents = pyproject.read_text()
pyproject.write_text(
pyproject_contents.replace("__package_name__", package_name).replace(
"__module_name__", module_name
)
"__module_name__",
module_name,
),
)
# move module folder
@@ -71,23 +72,26 @@ def new(
readme_contents = readme.read_text()
readme.write_text(
readme_contents.replace("__package_name__", package_name).replace(
"__app_route_code__", app_route_code
)
"__app_route_code__",
app_route_code,
),
)
# poetry install
if with_poetry:
subprocess.run(["poetry", "install"], cwd=destination_dir)
subprocess.run(["poetry", "install"], cwd=destination_dir) # noqa: S607
@package_cli.command()
def serve(
*,
port: Annotated[
Optional[int], typer.Option(help="The port to run the server on")
Optional[int],
typer.Option(help="The port to run the server on"),
] = None,
host: Annotated[
Optional[str], typer.Option(help="The host to run the server on")
Optional[str],
typer.Option(help="The host to run the server on"),
] = None,
configurable: Annotated[
Optional[bool],
@@ -104,7 +108,7 @@ def serve(
),
] = False,
) -> None:
"""Starts a demo app for this template."""
"""Start a demo app for this template."""
# load pyproject.toml
project_dir = get_package_root()
pyproject = project_dir / "pyproject.toml"
@@ -136,7 +140,7 @@ def serve(
@package_cli.command()
def list(contains: Annotated[Optional[str], typer.Argument()] = None) -> None:
def list(contains: Annotated[Optional[str], typer.Argument()] = None) -> None: # noqa: A001
"""List all or search for available templates."""
from langchain_cli.utils.github import list_packages
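
`A001` (flake8-builtins) fires because the command function above is literally named `list`, shadowing the builtin inside that module. The commit keeps the name (the CLI exposes it as `langchain template list`) and suppresses the rule; the rename-plus-explicit-command-name alternative would look roughly like this (names illustrative):

    from typing import Optional

    import typer

    package_cli = typer.Typer()

    # Renaming the function avoids shadowing the builtin, while the explicit
    # command name keeps the user-facing `list` subcommand.
    @package_cli.command(name="list")
    def list_templates(contains: Optional[str] = None) -> None:
        """List all or search for available templates."""
        typer.echo(f"searching for templates containing {contains!r}")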

View File

@@ -20,7 +20,7 @@ def create_events(events: list[EventDict]) -> Optional[Any]:
"properties": event.get("properties"),
}
for event in events
]
],
}
conn = http.client.HTTPSConnection("app.firstpartyhq.com")

View File

@@ -122,7 +122,9 @@ def parse_dependencies(
api_path: list[str],
) -> list[DependencySource]:
num_deps = max(
len(dependencies) if dependencies is not None else 0, len(repo), len(branch)
len(dependencies) if dependencies is not None else 0,
len(repo),
len(branch),
)
if (
(dependencies and len(dependencies) != num_deps)
@@ -143,7 +145,10 @@ def parse_dependencies(
return [
parse_dependency_string(iter_dep, iter_repo, iter_branch, iter_api_path)
for iter_dep, iter_repo, iter_branch, iter_api_path in zip(
inner_deps, inner_repos, inner_branches, inner_api_paths
inner_deps,
inner_repos,
inner_branches,
inner_api_paths,
)
]
@@ -186,7 +191,7 @@ def copy_repo(
source: Path,
destination: Path,
) -> None:
"""Copies a repo, ignoring git folders.
"""Copiy a repo, ignoring git folders.
Raises FileNotFound error if it can't find source
"""

View File

@@ -13,7 +13,9 @@ def list_packages(*, contains: Optional[str] = None):
}
conn.request(
"GET", "/repos/langchain-ai/langchain/contents/templates", headers=headers
"GET",
"/repos/langchain-ai/langchain/contents/templates",
headers=headers,
)
res = conn.getresponse()

View File

@@ -26,6 +26,7 @@ class LangServeExport(TypedDict):
module: The module to import from, tool.langserve.export_module
attr: The attribute to import from the module, tool.langserve.export_attr
package_name: The name of the package, tool.poetry.name
"""
module: str

View File

@@ -14,7 +14,8 @@ def _get_dep_inline_table(path: Path) -> InlineTable:
def add_dependencies_to_pyproject_toml(
pyproject_toml: Path, local_editable_dependencies: Iterable[tuple[str, Path]]
pyproject_toml: Path,
local_editable_dependencies: Iterable[tuple[str, Path]],
) -> None:
"""Add dependencies to pyproject.toml."""
with open(pyproject_toml, encoding="utf-8") as f:
@@ -24,14 +25,15 @@ def add_dependencies_to_pyproject_toml(
{
name: _get_dep_inline_table(loc.relative_to(pyproject_toml.parent))
for name, loc in local_editable_dependencies
}
},
)
with open(pyproject_toml, "w", encoding="utf-8") as f:
dump(pyproject, f)
def remove_dependencies_from_pyproject_toml(
pyproject_toml: Path, local_editable_dependencies: Iterable[str]
pyproject_toml: Path,
local_editable_dependencies: Iterable[str],
) -> None:
"""Remove dependencies from pyproject.toml."""
with open(pyproject_toml, encoding="utf-8") as f:

View File

@@ -49,12 +49,45 @@ exclude = [
[tool.ruff.lint]
select = [
"E", # pycodestyle
"F", # pyflakes
"I", # isort
"T201", # print
"UP", # pyupgrade
"A", # flake8-builtins
"ARG", # flake8-unused-arguments
"ASYNC", # flake8-async
"C4", # flake8-comprehensions
"COM", # flake8-commas
"D", # pydocstyle
"DOC", # pydoclint
"E", # pycodestyle error
"EM", # flake8-errmsg
"F", # pyflakes
"FA", # flake8-future-annotations
"FBT", # flake8-boolean-trap
"FLY", # flake8-flynt
"I", # isort
"ICN", # flake8-import-conventions
"INT", # flake8-gettext
"ISC", # isort-comprehensions
"N", # pep8-naming
"PT", # flake8-pytest-style
"PGH", # pygrep-hooks
"PIE", # flake8-pie
"PERF", # flake8-perf
"PYI", # flake8-pyi
"Q", # flake8-quotes
"RET", # flake8-return
"RSE", # flake8-rst-docstrings
"RUF", # ruff
"S", # flake8-bandit
"SLF", # flake8-self
"SLOT", # flake8-slots
"SIM", # flake8-simplify
"T10", # flake8-debugger
"T20", # flake8-print
"TID", # flake8-tidy-imports
"UP", # pyupgrade
"W", # pycodestyle warning
"YTT", # flake8-2020
]
ignore = ["D100", "D101", "D102", "D103", "D104", "D105", "D107"]
pyupgrade.keep-runtime-typing = true
[tool.mypy]
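
Several of the newly selected rules explain recurring edits elsewhere in this commit. `EM` (flake8-errmsg), for instance, is why raises now route through an intermediate `msg` variable instead of an inline literal or f-string; a minimal sketch mirroring the `identify_pkg_source` change above:

    # EM101/EM102: a string literal or f-string placed directly inside `raise`
    # gets repeated in the traceback, so the rule asks for a named variable.
    def check_single_package(matching_dirs: list[str]) -> None:
        if len(matching_dirs) != 1:
            msg = "There should be only one langchain package."
            raise ValueError(msg)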

View File

@@ -1,4 +1,3 @@
# type: ignore
"""Script to generate migrations for the migration script."""
import json
@@ -45,12 +44,17 @@ def cli() -> None:
)
@click.option(
"--format",
"format_",
type=click.Choice(["json", "grit"], case_sensitive=False),
default="json",
help="The output format for the migration script (json or grit).",
)
def generic(
pkg1: str, pkg2: str, output: str, filter_by_all: bool, format: str
pkg1: str,
pkg2: str,
output: str,
filter_by_all: bool, # noqa: FBT001
format_: str,
) -> None:
"""Generate a migration script."""
click.echo("Migration script generated.")
@@ -62,9 +66,9 @@ def generic(
name = f"{pkg1}_to_{pkg2}"
if output is None:
output = f"{name}.json" if format == "json" else f"{name}.grit"
output = f"{name}.json" if format_ == "json" else f"{name}.grit"
if format == "json":
if format_ == "json":
dumped = json.dumps(migrations, indent=2, sort_keys=True)
else:
dumped = dump_migrations_as_grit(name, migrations)

View File

@@ -6,7 +6,7 @@ class File:
self.name = name
self.content = "\n".join(content or [])
def __eq__(self, __value: object) -> bool:
def __eq__(self, __value: object, /) -> bool:
if not isinstance(__value, File):
return NotImplemented

View File

@@ -34,12 +34,12 @@ class Folder:
files.append(cls.from_structure(path))
else:
files.append(
File(path.name, path.read_text(encoding="utf-8").splitlines())
File(path.name, path.read_text(encoding="utf-8").splitlines()),
)
return Folder(name, *files)
def __eq__(self, __value: object) -> bool:
def __eq__(self, __value: object, /) -> bool:
if isinstance(__value, File):
return False

View File

@@ -34,7 +34,7 @@ def find_issue(current: Folder, expected: Folder) -> str:
expected_file.content.splitlines(),
fromfile=current_file.name,
tofile=expected_file.name,
)
),
)
return "Unknown"
@@ -47,8 +47,10 @@ def test_command_line(tmp_path: Path) -> None:
before.create_structure(root=Path(td))
# The input is used to force through the confirmation.
result = runner.invoke(app, ["migrate", before.name, "--force"])
assert result.exit_code == 0, result.output
if result.exit_code != 0:
raise RuntimeError(result.output)
after = Folder.from_structure(Path(td) / before.name)
assert after == expected, find_issue(after, expected)
if after != expected:
raise ValueError(find_issue(after, expected))

View File

@@ -12,14 +12,15 @@ def test_create_json_agent_migration() -> None:
"""Test the migration of create_json_agent from langchain to langchain_community."""
with sup1(), sup2():
raw_migrations = generate_simplified_migrations(
from_package="langchain", to_package="langchain_community"
from_package="langchain",
to_package="langchain_community",
)
json_agent_migrations = [
migration
for migration in raw_migrations
if "create_json_agent" in migration[0]
]
assert json_agent_migrations == [
if json_agent_migrations != [
(
"langchain.agents.create_json_agent",
"langchain_community.agent_toolkits.create_json_agent",
@@ -32,7 +33,9 @@ def test_create_json_agent_migration() -> None:
"langchain.agents.agent_toolkits.json.base.create_json_agent",
"langchain_community.agent_toolkits.create_json_agent",
),
]
]:
msg = "json_agent_migrations did not match the expected value"
raise ValueError(msg)
@pytest.mark.xfail(reason="Unknown reason")
@@ -40,15 +43,21 @@ def test_create_single_store_retriever_db() -> None:
"""Test migration from langchain to langchain_core."""
with sup1(), sup2():
raw_migrations = generate_simplified_migrations(
from_package="langchain", to_package="langchain_core"
from_package="langchain",
to_package="langchain_core",
)
# SingleStore was an old name for VectorStoreRetriever
single_store_migration = [
migration for migration in raw_migrations if "SingleStore" in migration[0]
]
assert single_store_migration == [
if single_store_migration != [
(
"langchain.vectorstores.singlestoredb.SingleStoreDBRetriever",
"langchain_core.vectorstores.VectorStoreRetriever",
),
]
]:
msg = (
"Unexpected migration: single_store_migration does not match expected "
"value"
)
raise ValueError(msg)

View File

@@ -9,7 +9,7 @@ pytest.importorskip(modname="langchain_openai")
def test_generate_migrations() -> None:
migrations = get_migrations_for_partner_package("langchain_openai")
assert migrations == [
if migrations != [
("langchain_community.llms.openai.OpenAI", "langchain_openai.OpenAI"),
("langchain_community.llms.openai.AzureOpenAI", "langchain_openai.AzureOpenAI"),
(
@@ -43,4 +43,6 @@ def test_generate_migrations() -> None:
"langchain_openai.AzureChatOpenAI",
),
("langchain_community.chat_models.ChatOpenAI", "langchain_openai.ChatOpenAI"),
]
]:
msg = "Migrations do not match expected result"
raise ValueError(msg)

View File

@@ -2,4 +2,6 @@ from langchain_cli.namespaces.migrate.generate.utils import PKGS_ROOT
def test_root() -> None:
assert PKGS_ROOT.name == "libs"
if PKGS_ROOT.name != "libs":
msg = "Expected PKGS_ROOT.name to be 'libs'."
raise ValueError(msg)

View File

@@ -5,6 +5,7 @@ from langchain_cli.utils.events import EventDict, create_events
@pytest.mark.xfail(reason="Unknown reason")
def test_create_events() -> None:
assert create_events(
[EventDict(event="Test Event", properties={"test": "test"})]
) == {"status": "success"}
result = create_events([EventDict(event="Test Event", properties={"test": "test"})])
if result != {"status": "success"}:
msg = "Expected {'status': 'success'}, got " + repr(result)
raise ValueError(msg)

View File

@@ -18,17 +18,37 @@ def _assert_dependency_equals(
subdirectory: Optional[str] = None,
event_metadata: Optional[dict] = None,
) -> None:
assert dep["git"] == git
assert dep["ref"] == ref
assert dep["subdirectory"] == subdirectory
if event_metadata is not None:
assert dep["event_metadata"] == event_metadata
if dep["git"] != git:
msg = f"Expected git to be {git} but got {dep['git']}"
raise ValueError(msg)
if dep["ref"] != ref:
msg = f"Expected ref to be {ref} but got {dep['ref']}"
raise ValueError(msg)
if dep["subdirectory"] != subdirectory:
msg = (
f"Expected subdirectory to be {subdirectory} but got {dep['subdirectory']}"
)
raise ValueError(msg)
if dep["subdirectory"] != subdirectory:
msg = (
f"Expected subdirectory to be {subdirectory} but got {dep['subdirectory']}"
)
raise ValueError(msg)
if event_metadata is not None and dep["event_metadata"] != event_metadata:
msg = (
f"Expected event_metadata to be {event_metadata} "
f"but got {dep['event_metadata']}"
)
raise ValueError(msg)
def test_dependency_string() -> None:
_assert_dependency_equals(
parse_dependency_string(
"git+ssh://git@github.com/efriis/myrepo.git", None, None, None
"git+ssh://git@github.com/efriis/myrepo.git",
None,
None,
None,
),
git="ssh://git@github.com/efriis/myrepo.git",
ref=None,
@@ -49,7 +69,10 @@ def test_dependency_string() -> None:
_assert_dependency_equals(
parse_dependency_string(
"git+ssh://git@github.com:efriis/myrepo.git#develop", None, None, None
"git+ssh://git@github.com:efriis/myrepo.git#develop",
None,
None,
None,
),
git="ssh://git@github.com:efriis/myrepo.git",
ref="develop",
@@ -59,7 +82,10 @@ def test_dependency_string() -> None:
# also support a slash in ssh
_assert_dependency_equals(
parse_dependency_string(
"git+ssh://git@github.com/efriis/myrepo.git#develop", None, None, None
"git+ssh://git@github.com/efriis/myrepo.git#develop",
None,
None,
None,
),
git="ssh://git@github.com/efriis/myrepo.git",
ref="develop",
@@ -69,7 +95,10 @@ def test_dependency_string() -> None:
# looks like poetry supports both an @ and a #
_assert_dependency_equals(
parse_dependency_string(
"git+ssh://git@github.com:efriis/myrepo.git@develop", None, None, None
"git+ssh://git@github.com:efriis/myrepo.git@develop",
None,
None,
None,
),
git="ssh://git@github.com:efriis/myrepo.git",
ref="develop",
@@ -100,7 +129,8 @@ def test_dependency_string_both() -> None:
def test_dependency_string_invalids() -> None:
# expect error for wrong order
with pytest.raises(ValueError):
# Bypassing validation since the ValueError message is dynamic
with pytest.raises(ValueError): # noqa: PT011
parse_dependency_string(
"git+https://github.com/efriis/myrepo.git#subdirectory=src@branch",
None,