Compare commits

...

33 Commits

Author  SHA1  Message  Date
Chester Curme  584a4bd9f6  fix check_prerelease_dependencies script  2025-02-03 16:54:19 -05:00
Chester Curme  0064cc4751  test release with python 3.11  2025-02-03 16:43:31 -05:00
Chester Curme  d9d8735d9f  x  2025-02-03 16:27:56 -05:00
Chester Curme  fb4adb444d  x  2025-02-03 16:24:54 -05:00
Chester Curme  dd47b8c454  x  2025-02-03 16:21:30 -05:00
Chester Curme  1c634f9436  Revert "test extras" (reverts commit 7a8d5586de)  2025-02-03 16:17:34 -05:00
Chester Curme  e939cd462a  increment version  2025-02-03 16:12:28 -05:00
Chester Curme  bc24f60a39  update release workflows  2025-02-03 16:12:14 -05:00
Chester Curme  7a8d5586de  test extras  2025-02-03 16:11:39 -05:00
Chester Curme  3898048b49  add optional dependencies  2025-02-03 15:44:44 -05:00
Chester Curme  cfa56be510  x  2025-02-03 15:21:46 -05:00
Chester Curme  fb79799571  x  2025-02-03 15:05:45 -05:00
Chester Curme  d9ce5f4464  lint  2025-02-03 14:43:05 -05:00
Chester Curme  d6e2af40eb  x  2025-02-03 14:35:20 -05:00
Chester Curme  1a22baf999  x  2025-02-03 14:30:55 -05:00
Chester Curme  e20603e225  update dependency tests  2025-02-03 14:17:19 -05:00
Chester Curme  c71fc8b58a  x  2025-02-03 13:32:57 -05:00
Chester Curme  1a4af623f1  x  2025-02-03 13:28:36 -05:00
Chester Curme  dfce027c57  update get_min_versions  2025-02-03 13:22:53 -05:00
Chester Curme  701387066c  x  2025-02-03 12:59:43 -05:00
Chester Curme  ea838e80c7  x  2025-02-03 12:53:38 -05:00
Chester Curme  4dcc659f48  update scripts (todo: fix)  2025-02-03 12:48:38 -05:00
Chester Curme  dcbedef4d8  update  2025-02-03 12:46:21 -05:00
Chester Curme  070e12f886  update check_diff script  2025-02-03 11:50:58 -05:00
Chester Curme  aa7637b2e4  update workflows  2025-02-03 11:40:39 -05:00
Chester Curme  b7bdf0b81a  update get_min_versions script  2025-02-03 11:28:41 -05:00
Chester Curme  e034999cb6  add action to setup uv and python  2025-02-03 11:28:26 -05:00
Chester Curme  a1a6ef10a9  update  2025-02-03 11:23:36 -05:00
Chester Curme  6693a0c80b  todo: delete tool.pdm.dev-dependencies  2025-02-03 11:18:22 -05:00
Chester Curme  4202c1411b  core  2025-02-03 11:14:44 -05:00
Chester Curme  10f677f2f7  remove tool.poetry  2025-02-03 10:53:46 -05:00
Chester Curme  f71cf387f0  update core  2025-02-03 10:49:46 -05:00
Chester Curme  b07e6be354  update  2025-02-02 14:35:19 -05:00
28 changed files with 17540 additions and 719 deletions

.github/actions/uv_setup/action.yml (vendored, new file, 23 lines added)
View File

@@ -0,0 +1,23 @@
# TODO: https://docs.astral.sh/uv/guides/integration/github/#caching
name: uv-install
description: Set up Python and uv
inputs:
python-version:
description: Python version, supporting MAJOR.MINOR only
required: true
env:
UV_VERSION: "0.5.25"
runs:
using: composite
steps:
- uses: actions/checkout@v4
- name: Install uv and set the python version
uses: astral-sh/setup-uv@v5
with:
version: ${{ env.UV_VERSION }}
python-version: ${{ inputs.python-version }}

View File

@@ -7,6 +7,8 @@ from typing import Dict, List, Set
from pathlib import Path
import tomllib
from packaging.requirements import Requirement
from get_min_versions import get_min_version_from_toml
@@ -55,21 +57,25 @@ def dependents_graph() -> dict:
"""
dependents = defaultdict(set)
for path in glob.glob("./libs/**/pyproject.toml", recursive=True):
# for path in glob.glob("./libs/**/pyproject.toml", recursive=True):
# TODO: fix this
for path in ["./libs/langchain/pyproject.toml", "./libs/core/pyproject.toml"]:
if "template" in path:
continue
# load regular and test deps from pyproject.toml
with open(path, "rb") as f:
pyproject = tomllib.load(f)["tool"]["poetry"]
pyproject = tomllib.load(f)
pkg_dir = "libs" + "/".join(path.split("libs")[1].split("/")[:-1])
for dep in [
*pyproject["dependencies"].keys(),
*pyproject["group"]["test"]["dependencies"].keys(),
*pyproject["project"]["dependencies"],
*pyproject["dependency-groups"]["test"],
]:
requirement = Requirement(dep)
package_name = requirement.name
if "langchain" in dep:
dependents[dep].add(pkg_dir)
dependents[package_name].add(pkg_dir)
continue
# load extended deps from extended_testing_deps.txt
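
With the move off [tool.poetry.dependencies], the entries read here are PEP 621 requirement strings rather than a name-keyed table, which is why the hunk above parses each one with packaging's Requirement before using the name as a graph key. A minimal sketch of that parsing (the example string is taken from the community pyproject.toml later in this diff):

```python
from packaging.requirements import Requirement

# PEP 621 dependencies are plain requirement strings; the package name
# and version bounds have to be parsed out before use.
dep = "langchain-core<1.0.0,>=0.3.32"
req = Requirement(dep)
print(req.name)       # langchain-core
print(req.specifier)  # <1.0.0,>=0.3.32
```
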
@@ -120,8 +126,7 @@ def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
py_versions = ["3.9", "3.10", "3.11", "3.12", "3.13"]
# custom logic for specific directories
elif dir_ == "libs/partners/milvus":
# milvus poetry doesn't allow 3.12 because they
# declare deps in funny way
# milvus doesn't allow 3.12 because they declare deps in funny way
py_versions = ["3.9", "3.11"]
elif dir_ in PY_312_MAX_PACKAGES:
@@ -148,17 +153,17 @@ def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
def _get_pydantic_test_configs(
dir_: str, *, python_version: str = "3.11"
) -> List[Dict[str, str]]:
with open("./libs/core/poetry.lock", "rb") as f:
core_poetry_lock_data = tomllib.load(f)
for package in core_poetry_lock_data["package"]:
with open("./libs/core/uv.lock", "rb") as f:
core_uv_lock_data = tomllib.load(f)
for package in core_uv_lock_data["package"]:
if package["name"] == "pydantic":
core_max_pydantic_minor = package["version"].split(".")[1]
break
with open(f"./{dir_}/poetry.lock", "rb") as f:
dir_poetry_lock_data = tomllib.load(f)
with open(f"./{dir_}/uv.lock", "rb") as f:
dir_uv_lock_data = tomllib.load(f)
for package in dir_poetry_lock_data["package"]:
for package in dir_uv_lock_data["package"]:
if package["name"] == "pydantic":
dir_max_pydantic_minor = package["version"].split(".")[1]
break
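
Since the uv.lock diffs below are suppressed for size, here is a small sketch of the structure this function now relies on: a uv lockfile is TOML with one [[package]] table per locked package, so the locked pydantic version can be read with tomllib exactly as above (paths assume the script runs from the repository root):

```python
import tomllib

# uv.lock is TOML; each locked package appears as a [[package]] table
# with at least "name" and "version" keys.
with open("./libs/core/uv.lock", "rb") as f:
    lock = tomllib.load(f)

pydantic = next(p for p in lock["package"] if p["name"] == "pydantic")
print(pydantic["version"].split(".")[1])  # minor version, e.g. "10" for 2.10.x
```
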
@@ -304,7 +309,7 @@ if __name__ == "__main__":
f"Unknown lib: {file}. check_diff.py likely needs "
"an update for this new library!"
)
elif file.startswith("docs/") or file in ["pyproject.toml", "poetry.lock"]: # docs or root poetry files
elif file.startswith("docs/") or file in ["pyproject.toml", "uv.lock"]: # docs or root uv files
docs_edited = True
dirs_to_run["lint"].add(".")

View File

@@ -10,26 +10,25 @@ if __name__ == "__main__":
toml_data = tomllib.load(file)
# see if we're releasing an rc
version = toml_data["tool"]["poetry"]["version"]
version = toml_data["project"]["version"]
releasing_rc = "rc" in version or "dev" in version
# if not, iterate through dependencies and make sure none allow prereleases
if not releasing_rc:
dependencies = toml_data["tool"]["poetry"]["dependencies"]
for lib in dependencies:
dep_version = dependencies[lib]
if True:#not releasing_rc:
dependencies = toml_data["project"]["dependencies"]
for dep_version in dependencies:
dep_version_string = (
dep_version["version"] if isinstance(dep_version, dict) else dep_version
)
if "rc" in dep_version_string:
raise ValueError(
f"Dependency {lib} has a prerelease version. Please remove this."
f"Dependency {dep_version} has a prerelease version. Please remove this."
)
if isinstance(dep_version, dict) and dep_version.get(
"allow-prereleases", False
):
raise ValueError(
f"Dependency {lib} has allow-prereleases set to true. Please remove this."
f"Dependency {dep_version} has allow-prereleases set to true. Please remove this."
)
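
For reference, packaging can also classify prerelease pins directly instead of substring-matching "rc". A hedged sketch of that alternative check over the same PEP 621 requirement strings (not what the script above does, just the underlying API; the "rc1" bound is illustrative):

```python
from packaging.requirements import Requirement
from packaging.version import InvalidVersion, Version

def has_prerelease_pin(dep: str) -> bool:
    # Inspect every version bound in the requirement string, e.g.
    # "langchain-core<1.0.0,>=0.3.33rc1" would be flagged.
    for spec in Requirement(dep).specifier:
        try:
            if Version(spec.version).is_prerelease:
                return True
        except InvalidVersion:  # wildcard bounds such as "==2.*"
            continue
    return False
```
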

View File

@@ -1,3 +1,4 @@
from collections import defaultdict
import sys
from typing import Optional
@@ -7,6 +8,7 @@ else:
# for python 3.10 and below, which doesnt have stdlib tomllib
import tomli as tomllib
from packaging.requirements import Requirement
from packaging.specifiers import SpecifierSet
from packaging.version import Version
@@ -94,6 +96,23 @@ def get_minimum_version(package_name: str, spec_string: str) -> Optional[str]:
return str(min(valid_versions)) if valid_versions else None
def _check_python_version_from_requirement(
requirement: Requirement, python_version: str
) -> bool:
if not requirement.marker:
return True
else:
marker_str = str(requirement.marker)
if "python_version" or "python_full_version" in marker_str:
python_version_str = "".join(
char
for char in marker_str
if char.isdigit() or char in (".", "<", ">", "=", ",")
)
return check_python_version(python_version, python_version_str)
return True
def get_min_version_from_toml(
toml_path: str,
versions_for: str,
@@ -105,8 +124,10 @@ def get_min_version_from_toml(
with open(toml_path, "rb") as file:
toml_data = tomllib.load(file)
# Get the dependencies from tool.poetry.dependencies
dependencies = toml_data["tool"]["poetry"]["dependencies"]
dependencies = defaultdict(list)
for dep in toml_data["project"]["dependencies"]:
requirement = Requirement(dep)
dependencies[requirement.name].append(requirement)
# Initialize a dictionary to store the minimum versions
min_versions = {}
@@ -121,17 +142,11 @@ def get_min_version_from_toml(
if lib in dependencies:
if include and lib not in include:
continue
# Get the version string
version_string = dependencies[lib]
if isinstance(version_string, dict):
version_string = version_string["version"]
if isinstance(version_string, list):
version_string = [
vs
for vs in version_string
if check_python_version(python_version, vs["python"])
][0]["version"]
requirements = dependencies[lib]
for requirement in requirements:
if _check_python_version_from_requirement(requirement, python_version):
version_string = str(requirement.specifier)
break
# Use parse_version to get the minimum supported version from version_string
min_version = get_minimum_version(lib, version_string)
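
The _check_python_version_from_requirement helper above scrapes digits out of the marker string; packaging can also evaluate a marker against an explicit environment, which is a tighter check. A minimal sketch of that API (the dependency string comes from the community pyproject.toml in this diff; this is not the script's implementation):

```python
from packaging.requirements import Requirement

req = Requirement('numpy<2,>=1.22.4; python_version < "3.12"')

# Marker.evaluate accepts an environment mapping that overrides the
# current interpreter's values, so the target Python can be passed in.
for py in ("3.11", "3.12"):
    applies = req.marker is None or req.marker.evaluate({"python_version": py})
    print(py, applies)  # 3.11 True, 3.12 False
```
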

View File

@@ -12,9 +12,6 @@ on:
type: string
description: "Python version to use"
env:
POETRY_VERSION: "1.8.4"
jobs:
build:
defaults:
@@ -22,25 +19,22 @@ jobs:
working-directory: ${{ inputs.working-directory }}
runs-on: ubuntu-latest
timeout-minutes: 20
name: "poetry run pytest -m compile tests/integration_tests #${{ inputs.python-version }}"
name: "uv run pytest -m compile tests/integration_tests #${{ inputs.python-version }}"
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python ${{ inputs.python-version }} + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ inputs.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: compile-integration
- name: Install integration dependencies
shell: bash
run: poetry install --with=test_integration,test
run: uv sync --group test --group test_integration
- name: Check integration tests compile
shell: bash
run: poetry run pytest -m compile tests/integration_tests
run: uv run pytest -m compile tests/integration_tests
- name: Ensure the tests did not create any additional files
shell: bash

View File

@@ -11,9 +11,6 @@ on:
type: string
description: "Python version to use"
env:
POETRY_VERSION: "1.8.4"
jobs:
build:
defaults:
@@ -24,22 +21,19 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python ${{ inputs.python-version }} + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ inputs.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: core
- name: Install dependencies
shell: bash
run: poetry install --with test,test_integration
run: uv sync --group test --group test_integration
- name: Install deps outside pyproject
if: ${{ startsWith(inputs.working-directory, 'libs/community/') }}
shell: bash
run: poetry run pip install "boto3<2" "google-cloud-aiplatform<2"
run: uv pip install "boto3<2" "google-cloud-aiplatform<2"
- name: Run integration tests
shell: bash

View File

@@ -13,7 +13,6 @@ on:
description: "Python version to use"
env:
POETRY_VERSION: "1.8.4"
WORKDIR: ${{ inputs.working-directory == '' && '.' || inputs.working-directory }}
# This env var allows us to get inline annotations when ruff has complaints.
@@ -27,25 +26,10 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python ${{ inputs.python-version }} + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ inputs.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: lint-with-extras
- name: Check Poetry File
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
poetry check
- name: Check lock file
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
poetry lock --check
- name: Install dependencies
# Also installs dev/lint/test/typing dependencies, to ensure we have
@@ -58,17 +42,7 @@ jobs:
# It doesn't matter how you change it, any change will cause a cache-bust.
working-directory: ${{ inputs.working-directory }}
run: |
poetry install --with lint,typing
- name: Get .mypy_cache to speed up mypy
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MIN: "2"
with:
path: |
${{ env.WORKDIR }}/.mypy_cache
key: mypy-lint-${{ runner.os }}-${{ runner.arch }}-py${{ inputs.python-version }}-${{ inputs.working-directory }}-${{ hashFiles(format('{0}/poetry.lock', inputs.working-directory)) }}
uv sync --group lint --group typing
- name: Analysing the code with our lint
working-directory: ${{ inputs.working-directory }}
@@ -87,21 +61,12 @@ jobs:
if: ${{ ! startsWith(inputs.working-directory, 'libs/partners/') }}
working-directory: ${{ inputs.working-directory }}
run: |
poetry install --with test
uv sync --group test
- name: Install unit+integration test dependencies
if: ${{ startsWith(inputs.working-directory, 'libs/partners/') }}
working-directory: ${{ inputs.working-directory }}
run: |
poetry install --with test,test_integration
- name: Get .mypy_cache_test to speed up mypy
uses: actions/cache@v4
env:
SEGMENT_DOWNLOAD_TIMEOUT_MIN: "2"
with:
path: |
${{ env.WORKDIR }}/.mypy_cache_test
key: mypy-test-${{ runner.os }}-${{ runner.arch }}-py${{ inputs.python-version }}-${{ inputs.working-directory }}-${{ hashFiles(format('{0}/poetry.lock', inputs.working-directory)) }}
uv sync --group test --group test_integration
- name: Analysing the code with our lint
working-directory: ${{ inputs.working-directory }}

View File

@@ -21,7 +21,6 @@ on:
env:
PYTHON_VERSION: "3.11"
POETRY_VERSION: "1.8.4"
jobs:
build:
@@ -36,13 +35,10 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ env.PYTHON_VERSION }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: release
# We want to keep this build stage *separate* from the release stage,
# so that there's no sharing of permissions between them.
@@ -56,7 +52,7 @@ jobs:
# > from the publish job.
# https://github.com/pypa/gh-action-pypi-publish#non-goals
- name: Build project for distribution
run: poetry build
run: uv build
working-directory: ${{ inputs.working-directory }}
- name: Upload build
@@ -67,11 +63,18 @@ jobs:
- name: Check Version
id: check-version
shell: bash
shell: python
working-directory: ${{ inputs.working-directory }}
run: |
echo pkg-name="$(poetry version | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
echo version="$(poetry version --short)" >> $GITHUB_OUTPUT
import os
import tomllib
with open("pyproject.toml", "rb") as f:
data = tomllib.load(f)
pkg_name = data["project"]["name"]
version = data["project"]["version"]
with open(os.environ["GITHUB_OUTPUT"], "a") as f:
f.write(f"pkg-name={pkg_name}\n")
f.write(f"version={version}\n")
release-notes:
needs:
- build
@@ -184,13 +187,11 @@ jobs:
# - The package is published, and it breaks on the missing dependency when
# used in the real world.
- name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
id: setup-python
with:
python-version: ${{ env.PYTHON_VERSION }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
- uses: actions/download-artifact@v4
with:
@@ -213,17 +214,18 @@ jobs:
# - attempt install again after 5 seconds if it fails because there is
# sometimes a delay in availability on test pypi
run: |
poetry run pip install dist/*.whl
uv venv
uv run pip install dist/*.whl
# Replace all dashes in the package name with underscores,
# since that's how Python imports packages with dashes in the name.
# also remove _official suffix
IMPORT_NAME="$(echo "$PKG_NAME" | sed s/-/_/g | sed s/_official//g)"
poetry run python -c "import $IMPORT_NAME; print(dir($IMPORT_NAME))"
uv run python -c "import $IMPORT_NAME; print(dir($IMPORT_NAME))"
- name: Import test dependencies
run: poetry install --with test --no-root
run: uv sync --group test
working-directory: ${{ inputs.working-directory }}
# Overwrite the local version of the package with the built version
@@ -234,7 +236,7 @@ jobs:
PKG_NAME: ${{ needs.build.outputs.pkg-name }}
VERSION: ${{ needs.build.outputs.version }}
run: |
poetry run pip install dist/*.whl
uv run pip install dist/*.whl
- name: Run unit tests
run: make tests
@@ -243,15 +245,15 @@ jobs:
- name: Check for prerelease versions
working-directory: ${{ inputs.working-directory }}
run: |
poetry run python $GITHUB_WORKSPACE/.github/scripts/check_prerelease_dependencies.py pyproject.toml
uv run python $GITHUB_WORKSPACE/.github/scripts/check_prerelease_dependencies.py pyproject.toml
- name: Get minimum versions
working-directory: ${{ inputs.working-directory }}
id: min-version
run: |
poetry run pip install packaging requests
python_version="$(poetry run python --version | awk '{print $2}')"
min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
uv run pip install packaging requests
python_version="$(uv run python --version | awk '{print $2}')"
min_versions="$(uv run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
echo "min-versions=$min_versions"
@@ -260,12 +262,12 @@ jobs:
env:
MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
run: |
poetry run pip install --force-reinstall $MIN_VERSIONS --editable .
uv run pip install --force-reinstall $MIN_VERSIONS --editable .
make tests
working-directory: ${{ inputs.working-directory }}
- name: Import integration test dependencies
run: poetry install --with test,test_integration
run: uv sync --group test --group test_integration
working-directory: ${{ inputs.working-directory }}
- name: Run integration tests
@@ -331,13 +333,10 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ env.PYTHON_VERSION }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: release
- uses: actions/download-artifact@v4
with:
@@ -373,13 +372,10 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ env.PYTHON_VERSION }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: release
- uses: actions/download-artifact@v4
with:

View File

@@ -12,9 +12,6 @@ on:
type: string
description: "Python version to use"
env:
POETRY_VERSION: "1.8.4"
jobs:
build:
defaults:
@@ -26,17 +23,14 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python ${{ inputs.python-version }} + uv
uses: "./.github/actions/uv_setup"
id: setup-python
with:
python-version: ${{ inputs.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: core
- name: Install dependencies
shell: bash
run: poetry install --with test
run: uv sync --group test --dev
- name: Run core tests
shell: bash
@@ -48,9 +42,9 @@ jobs:
id: min-version
shell: bash
run: |
poetry run pip install packaging tomli requests
python_version="$(poetry run python --version | awk '{print $2}')"
min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request $python_version)"
uv run pip install packaging tomli requests
python_version="$(uv run python --version | awk '{print $2}')"
min_versions="$(uv run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request $python_version)"
echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
echo "min-versions=$min_versions"
@@ -59,8 +53,7 @@ jobs:
env:
MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
run: |
poetry run pip install uv
poetry run uv pip install $MIN_VERSIONS
uv run pip install $MIN_VERSIONS
make tests
working-directory: ${{ inputs.working-directory }}

View File

@@ -17,9 +17,6 @@ on:
type: string
description: "Pydantic version to test."
env:
POETRY_VERSION: "1.8.4"
jobs:
build:
defaults:
@@ -31,21 +28,18 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python ${{ inputs.python-version }} + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ inputs.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: core
- name: Install dependencies
shell: bash
run: poetry install --with test
run: uv sync --group test
- name: Overwrite pydantic version
shell: bash
run: poetry run pip install pydantic~=${{ inputs.pydantic-version }}
run: uv pip install pydantic~=${{ inputs.pydantic-version }}
- name: Run core tests
shell: bash

View File

@@ -14,8 +14,7 @@ on:
description: "Release from a non-master branch (danger!)"
env:
POETRY_VERSION: "1.8.4"
PYTHON_VERSION: "3.10"
PYTHON_VERSION: "3.11"
jobs:
build:
@@ -29,13 +28,10 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ env.PYTHON_VERSION }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ inputs.working-directory }}
cache-key: release
# We want to keep this build stage *separate* from the release stage,
# so that there's no sharing of permissions between them.
@@ -49,7 +45,7 @@ jobs:
# > from the publish job.
# https://github.com/pypa/gh-action-pypi-publish#non-goals
- name: Build project for distribution
run: poetry build
run: uv build
working-directory: ${{ inputs.working-directory }}
- name: Upload build
@@ -60,11 +56,18 @@ jobs:
- name: Check Version
id: check-version
shell: bash
shell: python
working-directory: ${{ inputs.working-directory }}
run: |
echo pkg-name="$(poetry version | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
echo version="$(poetry version --short)" >> $GITHUB_OUTPUT
import os
import tomllib
with open("pyproject.toml", "rb") as f:
data = tomllib.load(f)
pkg_name = data["project"]["name"]
version = data["project"]["version"]
with open(os.environ["GITHUB_OUTPUT"], "a") as f:
f.write(f"pkg-name={pkg_name}\n")
f.write(f"version={version}\n")
publish:
needs:

View File

@@ -17,9 +17,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
POETRY_VERSION: "1.8.4"
jobs:
build:
runs-on: ubuntu-latest
@@ -127,21 +124,17 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.job-configs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
uses: "./.github/actions/poetry_setup"
- name: Set up Python ${{ matrix.job-configs.python-version }} + uv
uses: "./.github/actions/uv_setup"
with:
python-version: ${{ matrix.job-configs.python-version }}
poetry-version: ${{ env.POETRY_VERSION }}
working-directory: ${{ matrix.job-configs.working-directory }}
cache-key: extended
- name: Install dependencies
shell: bash
run: |
echo "Running extended tests, installing dependencies with poetry..."
poetry install --with test
poetry run pip install uv
poetry run uv pip install -r extended_testing_deps.txt
echo "Running extended tests, installing dependencies with uv..."
uv sync --group test
uv pip install -r extended_testing_deps.txt
- name: Run extended tests
run: make extended_tests

View File

@@ -1,15 +1,6 @@
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "langchain-community"
version = "0.3.16"
description = "Community contributed LangChain integrations."
authors = []
license = "MIT"
readme = "README.md"
repository = "https://github.com/langchain-ai/langchain"
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[tool.ruff]
exclude = [
@@ -27,31 +18,6 @@ skip = ".git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package
ignore-regex = ".*(Stati Uniti|Tense=Pres).*"
ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin,cann"
[tool.poetry.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/community"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-community%3D%3D0%22&expanded=true"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.32"
langchain = "^0.3.16"
SQLAlchemy = ">=1.4,<3"
requests = "^2"
PyYAML = ">=5.3"
aiohttp = "^3.8.3"
tenacity = ">=8.1.0,!=8.4.0,<10"
dataclasses-json = ">= 0.5.7, < 0.7"
pydantic-settings = "^2.4.0"
langsmith = ">=0.1.125,<0.4"
httpx-sse = "^0.4.0"
[[tool.poetry.dependencies.numpy]]
version = ">=1.22.4,<2"
python = "<3.12"
[[tool.poetry.dependencies.numpy]]
version = ">=1.26.2,<3"
python = ">=3.12"
[tool.ruff.lint]
select = ["E", "F", "I", "T201"]
@@ -72,100 +38,142 @@ filterwarnings = [
"ignore::langchain_core._api.deprecation.LangChainPendingDeprecationWarning:test",
]
[tool.poetry.group.test]
optional = true
[tool.pdm.dev-dependencies]
test = [
"pytest<8.0.0,>=7.4.4",
"pytest-cov<5.0.0,>=4.1.0",
"pytest-dotenv<1.0.0,>=0.5.2",
"duckdb-engine<1.0.0,>=0.13.6",
"pytest-watcher<1.0.0,>=0.2.6",
"freezegun<2.0.0,>=1.2.2",
"responses<1.0.0,>=0.22.0",
"pytest-asyncio<1.0.0,>=0.20.3",
"lark<2.0.0,>=1.1.5",
"pandas<3.0.0,>=2.0.0",
"pytest-mock<4.0.0,>=3.10.0",
"pytest-socket<1.0.0,>=0.6.0",
"syrupy<5.0.0,>=4.0.2",
"requests-mock<2.0.0,>=1.11.0",
"pytest-xdist<4.0.0,>=3.6.1",
"cffi<1.17.1; python_version < \"3.10\"",
"cffi; python_version >= \"3.10\"",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain @ file:///${PROJECT_ROOT}/../langchain",
"langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
]
codespell = [
"codespell<3.0.0,>=2.2.0",
]
test_integration = [
"pytest-vcr<2.0.0,>=1.0.2",
"vcrpy<7,>=6",
]
lint = [
"ruff<1.0,>=0.5",
"cffi<1.17.1; python_version < \"3.10\"",
"cffi; python_version >= \"3.10\"",
]
dev = [
"jupyter<2.0.0,>=1.0.0",
"setuptools<68.0.0,>=67.6.1",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
typing = [
"mypy<2.0,>=1.12",
"types-pyyaml<7.0.0.0,>=6.0.12.2",
"types-requests<3.0.0.0,>=2.28.11.5",
"types-toml<1.0.0.0,>=0.10.8.1",
"types-pytz<2024.0.0.0,>=2023.3.0.0",
"types-chardet<6.0.0.0,>=5.0.4.6",
"types-redis<5.0.0.0,>=4.3.21.6",
"mypy-protobuf<4.0.0,>=3.0.0",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
"langchain @ file:///${PROJECT_ROOT}/../langchain",
]
[tool.poetry.group.codespell]
optional = true
[tool.pdm.build]
includes = []
[tool.poetry.group.test_integration]
optional = true
[project]
authors = []
license = {text = "MIT"}
requires-python = "<4.0,>=3.9"
dependencies = [
"langchain-core<1.0.0,>=0.3.32",
"langchain<1.0.0,>=0.3.16",
"SQLAlchemy<3,>=1.4",
"requests<3,>=2",
"PyYAML>=5.3",
"aiohttp<4.0.0,>=3.8.3",
"tenacity!=8.4.0,<10,>=8.1.0",
"dataclasses-json<0.7,>=0.5.7",
"pydantic-settings<3.0.0,>=2.4.0",
"langsmith<0.4,>=0.1.125",
"httpx-sse<1.0.0,>=0.4.0",
"numpy<2,>=1.22.4; python_version < \"3.12\"",
"numpy<3,>=1.26.2; python_version >= \"3.12\"",
]
name = "langchain-community"
version = "0.3.16"
description = "Community contributed LangChain integrations."
readme = "README.md"
[tool.poetry.group.lint]
optional = true
[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/community"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-community%3D%3D0%22&expanded=true"
repository = "https://github.com/langchain-ai/langchain"
[tool.poetry.group.dev]
optional = true
[tool.poetry.group.test.dependencies]
pytest = "^7.4.4"
pytest-cov = "^4.1.0"
pytest-dotenv = "^0.5.2"
duckdb-engine = "^0.13.6"
pytest-watcher = "^0.2.6"
freezegun = "^1.2.2"
responses = "^0.22.0"
pytest-asyncio = "^0.20.3"
lark = "^1.1.5"
pandas = "^2.0.0"
pytest-mock = "^3.10.0"
pytest-socket = "^0.6.0"
syrupy = "^4.0.2"
requests-mock = "^1.11.0"
pytest-xdist = "^3.6.1"
[[tool.poetry.group.test.dependencies.cffi]]
version = "<1.17.1"
python = "<3.10"
[[tool.poetry.group.test.dependencies.cffi]]
version = "*"
python = ">=3.10"
[tool.poetry.group.codespell.dependencies]
codespell = "^2.2.0"
[tool.poetry.group.test_integration.dependencies]
pytest-vcr = "^1.0.2"
vcrpy = "^6"
[tool.poetry.group.lint.dependencies]
ruff = "^0.5"
[[tool.poetry.group.lint.dependencies.cffi]]
version = "<1.17.1"
python = "<3.10"
[[tool.poetry.group.lint.dependencies.cffi]]
version = "*"
python = ">=3.10"
[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
setuptools = "^67.6.1"
[tool.poetry.group.typing.dependencies]
mypy = "^1.12"
types-pyyaml = "^6.0.12.2"
types-requests = "^2.28.11.5"
types-toml = "^0.10.8.1"
types-pytz = "^2023.3.0.0"
types-chardet = "^5.0.4.6"
types-redis = "^4.3.21.6"
mypy-protobuf = "^3.0.0"
[tool.poetry.group.test.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.test.dependencies.langchain]
path = "../langchain"
develop = true
[tool.poetry.group.test.dependencies.langchain-tests]
path = "../standard-tests"
develop = true
[tool.poetry.group.dev.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.typing.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.typing.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true
[tool.poetry.group.typing.dependencies.langchain]
path = "../langchain"
develop = true
[dependency-groups]
test = [
"pytest<8.0.0,>=7.4.4",
"pytest-cov<5.0.0,>=4.1.0",
"pytest-dotenv<1.0.0,>=0.5.2",
"duckdb-engine<1.0.0,>=0.13.6",
"pytest-watcher<1.0.0,>=0.2.6",
"freezegun<2.0.0,>=1.2.2",
"responses<1.0.0,>=0.22.0",
"pytest-asyncio<1.0.0,>=0.20.3",
"lark<2.0.0,>=1.1.5",
"pandas<3.0.0,>=2.0.0",
"pytest-mock<4.0.0,>=3.10.0",
"pytest-socket<1.0.0,>=0.6.0",
"syrupy<5.0.0,>=4.0.2",
"requests-mock<2.0.0,>=1.11.0",
"pytest-xdist<4.0.0,>=3.6.1",
"cffi<1.17.1; python_version < \"3.10\"",
"cffi; python_version >= \"3.10\"",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain @ file:///${PROJECT_ROOT}/../langchain",
"langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
]
codespell = [
"codespell<3.0.0,>=2.2.0",
]
test_integration = [
"pytest-vcr<2.0.0,>=1.0.2",
"vcrpy<7,>=6",
]
lint = [
"ruff<1.0,>=0.5",
"cffi<1.17.1; python_version < \"3.10\"",
"cffi; python_version >= \"3.10\"",
]
dev = [
"jupyter<2.0.0,>=1.0.0",
"setuptools<68.0.0,>=67.6.1",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
typing = [
"mypy<2.0,>=1.12",
"types-pyyaml<7.0.0.0,>=6.0.12.2",
"types-requests<3.0.0.0,>=2.28.11.5",
"types-toml<1.0.0.0,>=0.10.8.1",
"types-pytz<2024.0.0.0,>=2023.3.0.0",
"types-chardet<6.0.0.0,>=5.0.4.6",
"types-redis<5.0.0.0,>=4.3.21.6",
"mypy-protobuf<4.0.0,>=3.0.0",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
"langchain @ file:///${PROJECT_ROOT}/../langchain",
]

libs/community/uv.lock (generated, new file, 4153 lines added): diff suppressed because it is too large.

View File

@@ -12,7 +12,7 @@ test tests:
-u LANGCHAIN_API_KEY \
-u LANGSMITH_TRACING \
-u LANGCHAIN_PROJECT \
poetry run pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)
uv run --group test pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)
test_watch:
env \
@@ -20,16 +20,16 @@ test_watch:
-u LANGCHAIN_API_KEY \
-u LANGSMITH_TRACING \
-u LANGCHAIN_PROJECT \
poetry run ptw --snapshot-update --now . --disable-socket --allow-unix-socket -- -vv $(TEST_FILE)
uv run --group test ptw --snapshot-update --now . --disable-socket --allow-unix-socket -- -vv $(TEST_FILE)
test_profile:
poetry run pytest -vv tests/unit_tests/ --profile-svg
uv run --group test pytest -vv tests/unit_tests/ --profile-svg
check_imports: $(shell find langchain_core -name '*.py')
poetry run python ./scripts/check_imports.py $^
uv run --group test python ./scripts/check_imports.py $^
extended_tests:
poetry run pytest --only-extended --disable-socket --allow-unix-socket $(TEST_FILE)
uv run --group test pytest --only-extended --disable-socket --allow-unix-socket $(TEST_FILE)
######################
@@ -47,19 +47,19 @@ lint_tests: MYPY_CACHE=.mypy_cache_test
lint lint_diff lint_package lint_tests:
./scripts/lint_imports.sh
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff
[ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES) --diff
[ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && uv run --all-groups mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
format format_diff:
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --fix $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check --fix $(PYTHON_FILES)
spell_check:
poetry run codespell --toml pyproject.toml
uv run --all-groups codespell --toml pyproject.toml
spell_fix:
poetry run codespell --toml pyproject.toml -w
uv run --all-groups codespell --toml pyproject.toml -w
######################
# HELP

View File

@@ -1,16 +1,70 @@
[build-system]
requires = [ "poetry-core>=1.0.0",]
build-backend = "poetry.core.masonry.api"
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[tool.poetry]
[project]
authors = []
license = {text = "MIT"}
requires-python = "<4.0,>=3.9"
dependencies = [
"langsmith<0.4,>=0.1.125",
"tenacity!=8.4.0,<10.0.0,>=8.1.0",
"jsonpatch<2.0,>=1.33",
"PyYAML>=5.3",
"packaging<25,>=23.2",
"typing-extensions>=4.7",
"pydantic<3.0.0,>=2.5.2; python_full_version < \"3.12.4\"",
"pydantic<3.0.0,>=2.7.4; python_full_version >= \"3.12.4\"",
]
name = "langchain-core"
version = "0.3.33"
description = "Building applications with LLMs through composability"
authors = []
license = "MIT"
readme = "README.md"
[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/core"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-core%3D%3D0%22&expanded=true"
repository = "https://github.com/langchain-ai/langchain"
[dependency-groups]
lint = [
"ruff<1.0.0,>=0.9.2",
]
typing = [
"mypy<1.11,>=1.10",
"types-pyyaml<7.0.0.0,>=6.0.12.2",
"types-requests<3.0.0.0,>=2.28.11.5",
"types-jinja2<3.0.0,>=2.11.9",
"langchain-text-splitters",
]
dev = [
"jupyter<2.0.0,>=1.0.0",
"setuptools<68.0.0,>=67.6.1",
"grandalf<1.0,>=0.8",
]
test = [
"pytest<9,>=8",
"freezegun<2.0.0,>=1.2.2",
"pytest-mock<4.0.0,>=3.10.0",
"syrupy<5.0.0,>=4.0.2",
"pytest-watcher<1.0.0,>=0.3.4",
"pytest-asyncio<1.0.0,>=0.21.1",
"grandalf<1.0,>=0.8",
"responses<1.0.0,>=0.25.0",
"pytest-socket<1.0.0,>=0.7.0",
"pytest-xdist<4.0.0,>=3.6.1",
"blockbuster~=1.5.11",
"numpy<2.0.0,>=1.24.0; python_version < \"3.12\"",
"numpy<3,>=1.26.0; python_version >= \"3.12\"",
"langchain-tests",
]
test_integration = []
[tool.uv.sources]
langchain-tests = { path = "../standard-tests" }
langchain-text-splitters = { path = "../text-splitters" }
[tool.mypy]
exclude = [ "notebooks", "examples", "example_data", "langchain_core/pydantic", "tests/unit_tests/utils/test_function_calling.py",]
disallow_untyped_defs = "True"
@@ -21,27 +75,6 @@ ignore_missing_imports = true
[tool.ruff]
target-version = "py39"
[tool.poetry.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/core"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-core%3D%3D0%22&expanded=true"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langsmith = ">=0.1.125,<0.4"
tenacity = ">=8.1.0,!=8.4.0,<10.0.0"
jsonpatch = "^1.33"
PyYAML = ">=5.3"
packaging = ">=23.2,<25"
typing-extensions = ">=4.7"
[[tool.poetry.dependencies.pydantic]]
version = "^2.5.2"
python = "<3.12.4"
[[tool.poetry.dependencies.pydantic]]
version = "^2.7.4"
python = ">=3.12.4"
[tool.poetry.extras]
[tool.ruff.lint]
select = [ "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TID", "TRY", "UP", "W", "YTT",]
@@ -56,21 +89,6 @@ markers = [ "requires: mark tests as requiring a specific library", "compile: ma
asyncio_mode = "auto"
filterwarnings = [ "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning",]
[tool.poetry.group.lint]
optional = true
[tool.poetry.group.typing]
optional = true
[tool.poetry.group.dev]
optional = true
[tool.poetry.group.test]
optional = true
[tool.poetry.group.test_integration]
optional = true
[tool.ruff.lint.pep8-naming]
classmethod-decorators = [ "classmethod", "langchain_core.utils.pydantic.pre_init", "pydantic.field_validator", "pydantic.v1.root_validator",]
@@ -80,71 +98,3 @@ classmethod-decorators = [ "classmethod", "langchain_core.utils.pydantic.pre_ini
"tests/unit_tests/runnables/test_graph.py" = [ "E501",]
"tests/**" = [ "S",]
"scripts/**" = [ "S",]
[tool.poetry.group.lint.dependencies]
ruff = "^0.9.2"
[tool.poetry.group.typing.dependencies]
mypy = ">=1.10,<1.11"
types-pyyaml = "^6.0.12.2"
types-requests = "^2.28.11.5"
types-jinja2 = "^2.11.9"
[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
setuptools = "^67.6.1"
grandalf = "^0.8"
[tool.poetry.group.test.dependencies]
pytest = "^8"
freezegun = "^1.2.2"
pytest-mock = "^3.10.0"
syrupy = "^4.0.2"
pytest-watcher = "^0.3.4"
pytest-asyncio = "^0.21.1"
grandalf = "^0.8"
responses = "^0.25.0"
pytest-socket = "^0.7.0"
pytest-xdist = "^3.6.1"
blockbuster = "~1.5.11"
[[tool.poetry.group.test.dependencies.numpy]]
version = "^1.24.0"
python = "<3.12"
[[tool.poetry.group.test.dependencies.numpy]]
version = ">=1.26.0,<3"
python = ">=3.12"
[tool.poetry.group.test_integration.dependencies]
[tool.poetry.group.typing.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true
[tool.poetry.group.test.dependencies.langchain-tests]
path = "../standard-tests"
develop = true

libs/core/uv.lock (generated, new file, 2707 lines added): diff suppressed because it is too large.

View File

@@ -12,33 +12,33 @@ TEST_FILE ?= tests/unit_tests/
# Run unit tests and generate a coverage report.
coverage:
poetry run pytest --cov \
uv run --group test pytest --cov \
--cov-config=.coveragerc \
--cov-report xml \
--cov-report term-missing:skip-covered \
$(TEST_FILE)
test tests:
poetry run pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)
uv run --group test pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)
extended_tests:
poetry run pytest --disable-socket --allow-unix-socket --only-extended tests/unit_tests
uv run --group test pytest --disable-socket --allow-unix-socket --only-extended tests/unit_tests
test_watch:
poetry run ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --disable-warnings tests/unit_tests
uv run --group test ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --disable-warnings tests/unit_tests
test_watch_extended:
poetry run ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --only-extended tests/unit_tests
uv run --group test ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --only-extended tests/unit_tests
integration_tests:
poetry run pytest tests/integration_tests
uv run --group test_integration pytest tests/integration_tests
docker_tests:
docker build -t my-langchain-image:test .
docker run --rm my-langchain-image:test
check_imports: $(shell find langchain -name '*.py')
poetry run python ./scripts/check_imports.py $^
uv run python ./scripts/check_imports.py $^
######################
# LINTING AND FORMATTING
@@ -55,19 +55,19 @@ lint_tests: MYPY_CACHE=.mypy_cache_test
lint lint_diff lint_package lint_tests:
./scripts/lint_imports.sh
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff
[ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES) --diff
[ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && uv run --all-groups mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
format format_diff:
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --select I --fix $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES)
[ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check --select I --fix $(PYTHON_FILES)
spell_check:
poetry run codespell --toml pyproject.toml
uv run --all-groups codespell --toml pyproject.toml
spell_fix:
poetry run codespell --toml pyproject.toml -w
uv run --all-groups codespell --toml pyproject.toml -w
######################
# HELP

View File

@@ -28,7 +28,7 @@ from typing_extensions import Self
if TYPE_CHECKING:
import openai
from openai.types.beta.threads import ThreadMessage
from openai.types.beta.threads import ThreadMessage # type: ignore[attr-defined]
from openai.types.beta.threads.required_action_function_tool_call import (
RequiredActionFunctionToolCall,
)
@@ -590,7 +590,8 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
isinstance(content, openai.types.beta.threads.TextContentBlock)
if version_gte_1_14
else isinstance(
content, openai.types.beta.threads.MessageContentText
content,
openai.types.beta.threads.MessageContentText, # type: ignore[attr-defined]
)
)
for content in answer
@@ -743,7 +744,8 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
isinstance(content, openai.types.beta.threads.TextContentBlock)
if version_gte_1_14
else isinstance(
content, openai.types.beta.threads.MessageContentText
content,
openai.types.beta.threads.MessageContentText, # type: ignore[attr-defined]
)
)
for content in answer

View File

@@ -236,7 +236,9 @@ class FlareChain(Chain):
"Please install langchain-openai."
"pip install langchain-openai"
)
llm = ChatOpenAI(max_tokens=max_generation_len, logprobs=True, temperature=0)
llm = ChatOpenAI(
max_completion_tokens=max_generation_len, logprobs=True, temperature=0
)
response_chain = PROMPT | llm
question_gen_chain = QUESTION_GENERATOR_PROMPT | llm | StrOutputParser()
return cls(

View File

@@ -65,7 +65,7 @@ class OpenAIModerationChain(Chain):
except ValueError:
values["openai_pre_1_0"] = True
if values["openai_pre_1_0"]:
values["client"] = openai.Moderation
values["client"] = openai.Moderation # type: ignore[attr-defined]
else:
values["client"] = openai.OpenAI(api_key=openai_api_key)
values["async_client"] = openai.AsyncOpenAI(api_key=openai_api_key)

View File

@@ -30,7 +30,9 @@ def _embedding_factory() -> Embeddings:
from langchain_openai import OpenAIEmbeddings
except ImportError:
try:
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.embeddings.openai import ( # type: ignore[no-redef]
OpenAIEmbeddings,
)
except ImportError:
raise ImportError(
"Could not import OpenAIEmbeddings. Please install the "
@@ -89,7 +91,9 @@ class _EmbeddingDistanceChainMixin(Chain):
pass
try:
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.embeddings.openai import ( # type: ignore[no-redef]
OpenAIEmbeddings,
)
types_.append(OpenAIEmbeddings)
except ImportError:

View File

@@ -135,7 +135,9 @@ def load_evaluator(
from langchain_openai import ChatOpenAI
except ImportError:
try:
from langchain_community.chat_models.openai import ChatOpenAI
from langchain_community.chat_models.openai import ( # type: ignore[no-redef]
ChatOpenAI,
)
except ImportError:
raise ImportError(
"Could not import langchain_openai or fallback onto "

View File

@@ -1,16 +1,119 @@
[build-system]
requires = [ "poetry-core>=1.0.0",]
build-backend = "poetry.core.masonry.api"
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[tool.poetry]
name = "langchain"
version = "0.3.17"
description = "Building applications with LLMs through composability"
[project]
authors = []
license = "MIT"
license = {text = "MIT"}
requires-python = "<4.0,>=3.9"
dependencies = [
"langchain-core<1.0.0,>=0.3.33",
"langchain-text-splitters<1.0.0,>=0.3.3",
"langsmith<0.4,>=0.1.17",
"pydantic<3.0.0,>=2.7.4",
"SQLAlchemy<3,>=1.4",
"requests<3,>=2",
"PyYAML>=5.3",
"aiohttp<4.0.0,>=3.8.3",
"tenacity!=8.4.0,<10,>=8.1.0",
"numpy<2,>=1.26.4; python_version < \"3.12\"",
"numpy<3,>=1.26.2; python_version >= \"3.12\"",
"async-timeout<5.0.0,>=4.0.0; python_version < \"3.11\"",
]
name = "langchain"
version = "0.3.18rc1"
description = "Building applications with LLMs through composability"
readme = "README.md"
[project.optional-dependencies]
community = ["langchain-community"]
anthropic = ["langchain-anthropic"]
openai = ["langchain-openai"]
cohere = ["langchain-cohere"]
google-vertexai = ["langchain-google-vertexai"]
google-genai = ["langchain-google-genai"]
fireworks = ["langchain-fireworks"]
ollama = ["langchain-ollama"]
together = ["langchain-together"]
mistralai = ["langchain-mistralai"]
huggingface = ["langchain-huggingface"]
groq = ["langchain-groq"]
aws = ["langchain-aws"]
deepseek = ["langchain-deepseek"]
[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain%3D%3D0%22&expanded=true"
repository = "https://github.com/langchain-ai/langchain"
[project.scripts]
langchain-server = "langchain.server:main"
[dependency-groups]
test = [
"pytest<9,>=8",
"pytest-cov<5.0.0,>=4.0.0",
"pytest-dotenv<1.0.0,>=0.5.2",
"duckdb-engine<1.0.0,>=0.9.2",
"pytest-watcher<1.0.0,>=0.2.6",
"freezegun<2.0.0,>=1.2.2",
"responses<1.0.0,>=0.22.0",
"pytest-asyncio<1.0.0,>=0.23.2",
"lark<2.0.0,>=1.1.5",
"pandas<3.0.0,>=2.0.0",
"pytest-mock<4.0.0,>=3.10.0",
"pytest-socket<1.0.0,>=0.6.0",
"syrupy<5.0.0,>=4.0.2",
"requests-mock<2.0.0,>=1.11.0",
"pytest-xdist<4.0.0,>=3.6.1",
"cffi<1.17.1; python_version < \"3.10\"",
"cffi; python_version >= \"3.10\"",
"langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
"langchain-openai @ file:///${PROJECT_ROOT}/../partners/openai",
"toml>=0.10.2",
"packaging>=24.2",
]
codespell = [
"codespell<3.0.0,>=2.2.0",
]
test_integration = [
"pytest-vcr<2.0.0,>=1.0.2",
"urllib3<2; python_version < \"3.10\"",
"wrapt<2.0.0,>=1.15.0",
"python-dotenv<2.0.0,>=1.0.0",
"cassio<1.0.0,>=0.1.0",
"langchainhub<1.0.0,>=0.1.16",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
]
lint = [
"ruff<1.0.0,>=0.9.2",
"cffi<1.17.1; python_version < \"3.10\"",
"cffi; python_version >= \"3.10\"",
]
typing = [
"mypy<2.0,>=1.10",
"types-pyyaml<7.0.0.0,>=6.0.12.2",
"types-requests<3.0.0.0,>=2.28.11.5",
"types-toml<1.0.0.0,>=0.10.8.1",
"types-redis<5.0.0.0,>=4.3.21.6",
"types-pytz<2024.0.0.0,>=2023.3.0.0",
"types-chardet<6.0.0.0,>=5.0.4.6",
"mypy-protobuf<4.0.0,>=3.0.0",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
]
dev = [
"jupyter<2.0.0,>=1.0.0",
"playwright<2.0.0,>=1.28.0",
"setuptools<68.0.0,>=67.6.1",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
]
[tool.ruff]
target-version = "py39"
exclude = [ "tests/integration_tests/examples/non-utf8-encoding.py",]
@@ -25,32 +128,6 @@ skip = ".git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package
ignore-regex = ".*(Stati Uniti|Tense=Pres).*"
ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin"
[tool.poetry.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain%3D%3D0%22&expanded=true"
[tool.poetry.scripts]
langchain-server = "langchain.server:main"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.33"
langchain-text-splitters = "^0.3.3"
langsmith = ">=0.1.17,<0.4"
pydantic = "^2.7.4"
SQLAlchemy = ">=1.4,<3"
requests = "^2"
PyYAML = ">=5.3"
aiohttp = "^3.8.3"
tenacity = ">=8.1.0,!=8.4.0,<10"
[[tool.poetry.dependencies.numpy]]
version = ">=1.22.4,<2"
python = "<3.12"
[[tool.poetry.dependencies.numpy]]
version = ">=1.26.2,<3"
python = ">=3.12"
[tool.ruff.lint]
select = [ "E", "F", "I", "T201", "D",]
pydocstyle = { convention = "google" }
@@ -67,125 +144,3 @@ addopts = "--strict-markers --strict-config --durations=5 --snapshot-warn-unused
markers = [ "requires: mark tests as requiring a specific library", "scheduled: mark tests to run in scheduled testing", "compile: mark placeholder test used to compile integration tests without running them",]
asyncio_mode = "auto"
filterwarnings = [ "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning", "ignore::langchain_core._api.deprecation.LangChainDeprecationWarning:tests", "ignore::langchain_core._api.deprecation.LangChainPendingDeprecationWarning:tests",]
[tool.poetry.dependencies.async-timeout]
version = "^4.0.0"
python = "<3.11"
[tool.poetry.group.test]
optional = true
[tool.poetry.group.codespell]
optional = true
[tool.poetry.group.test_integration]
optional = true
[tool.poetry.group.lint]
optional = true
[tool.poetry.group.typing]
optional = true
[tool.poetry.group.dev]
optional = true
[tool.poetry.group.test.dependencies]
pytest = "^8"
pytest-cov = "^4.0.0"
pytest-dotenv = "^0.5.2"
duckdb-engine = "^0.9.2"
pytest-watcher = "^0.2.6"
freezegun = "^1.2.2"
responses = "^0.22.0"
pytest-asyncio = "^0.23.2"
lark = "^1.1.5"
pandas = "^2.0.0"
pytest-mock = "^3.10.0"
pytest-socket = "^0.6.0"
syrupy = "^4.0.2"
requests-mock = "^1.11.0"
pytest-xdist = "^3.6.1"
[[tool.poetry.group.test.dependencies.cffi]]
version = "<1.17.1"
python = "<3.10"
[[tool.poetry.group.test.dependencies.cffi]]
version = "*"
python = ">=3.10"
[tool.poetry.group.codespell.dependencies]
codespell = "^2.2.0"
[tool.poetry.group.test_integration.dependencies]
pytest-vcr = "^1.0.2"
wrapt = "^1.15.0"
python-dotenv = "^1.0.0"
cassio = "^0.1.0"
langchainhub = "^0.1.16"
[tool.poetry.group.lint.dependencies]
ruff = "^0.9.2"
[[tool.poetry.group.lint.dependencies.cffi]]
version = "<1.17.1"
python = "<3.10"
[[tool.poetry.group.lint.dependencies.cffi]]
version = "*"
python = ">=3.10"
[tool.poetry.group.typing.dependencies]
mypy = "^1.10"
types-pyyaml = "^6.0.12.2"
types-requests = "^2.28.11.5"
types-toml = "^0.10.8.1"
types-redis = "^4.3.21.6"
types-pytz = "^2023.3.0.0"
types-chardet = "^5.0.4.6"
mypy-protobuf = "^3.0.0"
[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
playwright = "^1.28.0"
setuptools = "^67.6.1"
[tool.poetry.group.test.dependencies.langchain-tests]
path = "../standard-tests"
develop = true
[tool.poetry.group.test.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.test.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true
[tool.poetry.group.test.dependencies.langchain-openai]
path = "../partners/openai"
optional = true
develop = true
[tool.poetry.group.test_integration.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.test_integration.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true
[tool.poetry.group.typing.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.typing.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true
[tool.poetry.group.dev.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.dev.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true

View File

@@ -5,6 +5,7 @@ from typing import Any, Dict, Mapping
import pytest
import toml
from packaging.requirements import Requirement
HERE = Path(__file__).parent
@@ -12,30 +13,21 @@ PYPROJECT_TOML = HERE / "../../pyproject.toml"
@pytest.fixture()
def poetry_conf() -> Dict[str, Any]:
def uv_conf() -> Dict[str, Any]:
"""Load the pyproject.toml file."""
with open(PYPROJECT_TOML) as f:
return toml.load(f)["tool"]["poetry"]
return toml.load(f)
def test_required_dependencies(poetry_conf: Mapping[str, Any]) -> None:
def test_required_dependencies(uv_conf: Mapping[str, Any]) -> None:
"""A test that checks if a new non-optional dependency is being introduced.
If this test is triggered, it means that a contributor is trying to introduce a new
required dependency. This should be avoided in most situations.
"""
# Get the dependencies from the [tool.poetry.dependencies] section
dependencies = poetry_conf["dependencies"]
is_required = {
package_name: isinstance(requirements, str)
or isinstance(requirements, list)
or not requirements.get("optional", False)
for package_name, requirements in dependencies.items()
}
required_dependencies = [
package_name for package_name, required in is_required.items() if required
]
dependencies = uv_conf["project"]["dependencies"]
required_dependencies = set(Requirement(dep).name for dep in dependencies)
assert sorted(required_dependencies) == sorted(
[
@@ -48,22 +40,13 @@ def test_required_dependencies(poetry_conf: Mapping[str, Any]) -> None:
"langsmith",
"numpy",
"pydantic",
"python",
"requests",
"tenacity",
]
)
unrequired_dependencies = [
package_name for package_name, required in is_required.items() if not required
]
in_extras = [
dep for group in poetry_conf.get("extras", {}).values() for dep in group
]
assert set(unrequired_dependencies) == set(in_extras)
def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
def test_test_group_dependencies(uv_conf: Mapping[str, Any]) -> None:
"""Check if someone is attempting to add additional test dependencies.
Only dependencies associated with test running infrastructure should be added
@@ -72,9 +55,10 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
Examples of dependencies that should NOT be included: boto3, azure, postgres, etc.
"""
test_group_deps = sorted(poetry_conf["group"]["test"]["dependencies"])
dependencies = uv_conf["dependency-groups"]["test"]
test_group_deps = set(Requirement(dep).name for dep in dependencies)
assert test_group_deps == sorted(
assert sorted(test_group_deps) == sorted(
[
"duckdb-engine",
"freezegun",
@@ -83,6 +67,7 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
"langchain-text-splitters",
"langchain-openai",
"lark",
"packaging",
"pandas",
"pytest",
"pytest-asyncio",
@@ -94,6 +79,7 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
"pytest-xdist",
"responses",
"syrupy",
"toml",
"requests-mock",
# TODO: temporary hack since cffi 1.17.1 doesn't work with py 3.9.
"cffi",

libs/langchain/uv.lock (generated, new file, 6112 lines added): diff suppressed because it is too large.

View File

@@ -1,15 +1,6 @@
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "langchain-text-splitters"
version = "0.3.5"
description = "LangChain text splitting utilities"
authors = []
license = "MIT"
readme = "README.md"
repository = "https://github.com/langchain-ai/langchain"
requires = ["pdm-backend"]
build-backend = "pdm.backend"
[tool.mypy]
disallow_untyped_defs = "True"
@@ -30,14 +21,6 @@ module = [
]
ignore_missing_imports = "True"
[tool.poetry.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/text-splitters"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-text-splitters%3D%3D0%22&expanded=true"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.29"
[tool.ruff]
target-version = "py39"
@@ -56,84 +39,92 @@ markers = [
]
asyncio_mode = "auto"
[tool.poetry.group.lint]
optional = true
[tool.poetry.group.typing]
optional = true
[tool.poetry.group.dev]
optional = true
[tool.poetry.group.test]
optional = true
[tool.ruff.lint.pydocstyle]
convention = "google"
[tool.ruff.lint.per-file-ignores]
"tests/**" = ["D"]
[tool.poetry.group.lint.dependencies]
ruff = "^0.9.2"
[tool.pdm.dev-dependencies]
lint = [
"ruff<1.0.0,>=0.9.2",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
typing = [
"mypy<2.0,>=1.10",
"lxml-stubs<1.0.0,>=0.5.1",
"types-requests<3.0.0.0,>=2.31.0.20240218",
"tiktoken<1.0.0,>=0.8.0",
]
dev = [
"jupyter<2.0.0,>=1.0.0",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test = [
"pytest<9,>=8",
"freezegun<2.0.0,>=1.2.2",
"pytest-mock<4.0.0,>=3.10.0",
"pytest-watcher<1.0.0,>=0.3.4",
"pytest-asyncio<1.0.0,>=0.21.1",
"pytest-socket<1.0.0,>=0.7.0",
"pytest-xdist<4.0.0,>=3.6.1",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test_integration = [
"spacy; python_version < \"3.13\"",
"nltk<4.0.0,>=3.9.1",
"transformers<5.0.0,>=4.47.0",
"sentence-transformers>=2.6.0; python_version < \"3.13\"",
]
[tool.pdm.build]
includes = []
[project]
authors = []
license = {text = "MIT"}
requires-python = "<4.0,>=3.9"
dependencies = [
"langchain-core<1.0.0,>=0.3.29",
]
name = "langchain-text-splitters"
version = "0.3.5"
description = "LangChain text splitting utilities"
readme = "README.md"
[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/text-splitters"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-text-splitters%3D%3D0%22&expanded=true"
repository = "https://github.com/langchain-ai/langchain"
[tool.poetry.group.typing.dependencies]
mypy = "^1.10"
lxml-stubs = "^0.5.1"
types-requests = "^2.31.0.20240218"
tiktoken = "^0.8.0"
[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
[tool.poetry.group.test.dependencies]
pytest = "^8"
freezegun = "^1.2.2"
pytest-mock = "^3.10.0"
pytest-watcher = "^0.3.4"
pytest-asyncio = "^0.21.1"
pytest-socket = "^0.7.0"
pytest-xdist = "^3.6.1"
[tool.poetry.group.test_integration]
optional = true
[tool.poetry.group.test_integration.dependencies]
spacy = { version = "*", python = "<3.13" }
nltk = "^3.9.1"
transformers = "^4.47.0"
sentence-transformers = { version = ">=2.6.0", python = "<3.13" }
[tool.poetry.group.lint.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.dev.dependencies.langchain-core]
path = "../core"
develop = true
[tool.poetry.group.test.dependencies.langchain-core]
path = "../core"
develop = true
[dependency-groups]
lint = [
"ruff<1.0.0,>=0.9.2",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
typing = [
"mypy<2.0,>=1.10",
"lxml-stubs<1.0.0,>=0.5.1",
"types-requests<3.0.0.0,>=2.31.0.20240218",
"tiktoken<1.0.0,>=0.8.0",
]
dev = [
"jupyter<2.0.0,>=1.0.0",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test = [
"pytest<9,>=8",
"freezegun<2.0.0,>=1.2.2",
"pytest-mock<4.0.0,>=3.10.0",
"pytest-watcher<1.0.0,>=0.3.4",
"pytest-asyncio<1.0.0,>=0.21.1",
"pytest-socket<1.0.0,>=0.7.0",
"pytest-xdist<4.0.0,>=3.6.1",
"langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test_integration = [
"spacy; python_version < \"3.13\"",
"nltk<4.0.0,>=3.9.1",
"transformers<5.0.0,>=4.47.0",
"sentence-transformers>=2.6.0; python_version < \"3.13\"",
]

libs/text-splitters/uv.lock (generated, new file, 3975 lines added): diff suppressed because it is too large.