Mirror of https://github.com/hwchase17/langchain.git
Synced 2026-02-11 03:30:09 +00:00

Compare commits: cc/test_o3 ... langchain
38 commits
| SHA1 |
| --- |
| 584a4bd9f6 |
| 0064cc4751 |
| d9d8735d9f |
| fb4adb444d |
| dd47b8c454 |
| 1c634f9436 |
| e939cd462a |
| bc24f60a39 |
| 7a8d5586de |
| 3898048b49 |
| cfa56be510 |
| fb79799571 |
| d9ce5f4464 |
| d6e2af40eb |
| 1a22baf999 |
| e20603e225 |
| c71fc8b58a |
| 1a4af623f1 |
| dfce027c57 |
| 701387066c |
| ea838e80c7 |
| 4dcc659f48 |
| dcbedef4d8 |
| 070e12f886 |
| aa7637b2e4 |
| b7bdf0b81a |
| e034999cb6 |
| a1a6ef10a9 |
| 6693a0c80b |
| 4202c1411b |
| 10f677f2f7 |
| f71cf387f0 |
| b07e6be354 |
| 0c782ee547 |
| db1693aa70 |
| 2f97916dea |
| a3c5e4d070 |
| 16a422f3fa |
23 .github/actions/uv_setup/action.yml (vendored, new file)

@@ -0,0 +1,23 @@
# TODO: https://docs.astral.sh/uv/guides/integration/github/#caching

name: uv-install
description: Set up Python and uv

inputs:
  python-version:
    description: Python version, supporting MAJOR.MINOR only
    required: true

env:
  UV_VERSION: "0.5.25"

runs:
  using: composite
  steps:
    - uses: actions/checkout@v4

    - name: Install uv and set the python version
      uses: astral-sh/setup-uv@v5
      with:
        version: ${{ env.UV_VERSION }}
        python-version: ${{ inputs.python-version }}
33 .github/scripts/check_diff.py (vendored)

@@ -7,6 +7,8 @@ from typing import Dict, List, Set
 from pathlib import Path
 import tomllib
 
+from packaging.requirements import Requirement
+
 from get_min_versions import get_min_version_from_toml
 
 
@@ -55,21 +57,25 @@ def dependents_graph() -> dict:
     """
     dependents = defaultdict(set)
 
-    for path in glob.glob("./libs/**/pyproject.toml", recursive=True):
+    # for path in glob.glob("./libs/**/pyproject.toml", recursive=True):
+    # TODO: fix this
+    for path in ["./libs/langchain/pyproject.toml", "./libs/core/pyproject.toml"]:
         if "template" in path:
             continue
 
         # load regular and test deps from pyproject.toml
         with open(path, "rb") as f:
-            pyproject = tomllib.load(f)["tool"]["poetry"]
+            pyproject = tomllib.load(f)
 
         pkg_dir = "libs" + "/".join(path.split("libs")[1].split("/")[:-1])
         for dep in [
-            *pyproject["dependencies"].keys(),
-            *pyproject["group"]["test"]["dependencies"].keys(),
+            *pyproject["project"]["dependencies"],
+            *pyproject["dependency-groups"]["test"],
         ]:
+            requirement = Requirement(dep)
+            package_name = requirement.name
             if "langchain" in dep:
-                dependents[dep].add(pkg_dir)
+                dependents[package_name].add(pkg_dir)
                 continue
 
         # load extended deps from extended_testing_deps.txt
@@ -120,8 +126,7 @@ def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
         py_versions = ["3.9", "3.10", "3.11", "3.12", "3.13"]
     # custom logic for specific directories
     elif dir_ == "libs/partners/milvus":
-        # milvus poetry doesn't allow 3.12 because they
-        # declare deps in funny way
+        # milvus doesn't allow 3.12 because they declare deps in funny way
         py_versions = ["3.9", "3.11"]
 
     elif dir_ in PY_312_MAX_PACKAGES:
@@ -148,17 +153,17 @@ def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
 def _get_pydantic_test_configs(
     dir_: str, *, python_version: str = "3.11"
 ) -> List[Dict[str, str]]:
-    with open("./libs/core/poetry.lock", "rb") as f:
-        core_poetry_lock_data = tomllib.load(f)
-    for package in core_poetry_lock_data["package"]:
+    with open("./libs/core/uv.lock", "rb") as f:
+        core_uv_lock_data = tomllib.load(f)
+    for package in core_uv_lock_data["package"]:
         if package["name"] == "pydantic":
             core_max_pydantic_minor = package["version"].split(".")[1]
             break
 
-    with open(f"./{dir_}/poetry.lock", "rb") as f:
-        dir_poetry_lock_data = tomllib.load(f)
+    with open(f"./{dir_}/uv.lock", "rb") as f:
+        dir_uv_lock_data = tomllib.load(f)
 
-    for package in dir_poetry_lock_data["package"]:
+    for package in dir_uv_lock_data["package"]:
         if package["name"] == "pydantic":
             dir_max_pydantic_minor = package["version"].split(".")[1]
             break
@@ -304,7 +309,7 @@ if __name__ == "__main__":
                 f"Unknown lib: {file}. check_diff.py likely needs "
                 "an update for this new library!"
             )
-        elif file.startswith("docs/") or file in ["pyproject.toml", "poetry.lock"]:  # docs or root poetry files
+        elif file.startswith("docs/") or file in ["pyproject.toml", "uv.lock"]:  # docs or root uv files
            docs_edited = True
            dirs_to_run["lint"].add(".")
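Note on the `dependents_graph` change above: Poetry kept dependencies as a name-to-constraint table under `[tool.poetry.dependencies]`, while PEP 621 `[project.dependencies]` and `[dependency-groups]` hold lists of PEP 508 requirement strings, which is why each entry is now routed through `packaging.requirements.Requirement` to recover the package name. A minimal sketch of the difference (the path below is illustrative):

```python
import tomllib  # stdlib on Python 3.11+

from packaging.requirements import Requirement

with open("libs/core/pyproject.toml", "rb") as f:
    pyproject = tomllib.load(f)

# Poetry layout (before): a table, e.g. {"langchain-core": "^0.3.32", ...},
# read via pyproject["tool"]["poetry"]["dependencies"].
# PEP 621 layout (after): a list of strings, e.g. "langchain-core<1.0.0,>=0.3.32".
for dep in pyproject["project"]["dependencies"]:
    requirement = Requirement(dep)
    print(requirement.name, str(requirement.specifier))
```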
13 .github/scripts/check_prerelease_dependencies.py (vendored)

@@ -10,26 +10,25 @@ if __name__ == "__main__":
         toml_data = tomllib.load(file)
 
     # see if we're releasing an rc
-    version = toml_data["tool"]["poetry"]["version"]
+    version = toml_data["project"]["version"]
     releasing_rc = "rc" in version or "dev" in version
 
     # if not, iterate through dependencies and make sure none allow prereleases
-    if not releasing_rc:
-        dependencies = toml_data["tool"]["poetry"]["dependencies"]
-        for lib in dependencies:
-            dep_version = dependencies[lib]
+    if True:#not releasing_rc:
+        dependencies = toml_data["project"]["dependencies"]
+        for dep_version in dependencies:
            dep_version_string = (
                dep_version["version"] if isinstance(dep_version, dict) else dep_version
            )
 
            if "rc" in dep_version_string:
                raise ValueError(
-                    f"Dependency {lib} has a prerelease version. Please remove this."
+                    f"Dependency {dep_version} has a prerelease version. Please remove this."
                )
 
            if isinstance(dep_version, dict) and dep_version.get(
                "allow-prereleases", False
            ):
                raise ValueError(
-                    f"Dependency {lib} has allow-prereleases set to true. Please remove this."
+                    f"Dependency {dep_version} has allow-prereleases set to true. Please remove this."
                )
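One caveat worth noting on the prerelease check above: with PEP 621, `dep_version` is a full PEP 508 string that includes the package name, so the substring test `"rc" in dep_version_string` can also fire on a name that merely contains "rc" (for example `torch`). A stricter sketch using `packaging`, offered only as an alternative to the substring test, not as what the script currently does:

```python
from packaging.requirements import Requirement
from packaging.version import InvalidVersion, Version


def has_prerelease_pin(dep: str) -> bool:
    """True only when a version named in the specifier is a pre/dev release."""
    for spec in Requirement(dep).specifier:
        try:
            # Specifier.version is the text to the right of the operator.
            if Version(spec.version).is_prerelease:
                return True
        except InvalidVersion:
            continue  # wildcard specifiers such as "==1.*" are not plain versions
    return False


assert has_prerelease_pin("langchain-core==0.3.0rc1")
assert not has_prerelease_pin("torch<3,>=2")  # no false positive on the name
```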
41 .github/scripts/get_min_versions.py (vendored)

@@ -1,3 +1,4 @@
+from collections import defaultdict
 import sys
 from typing import Optional
 
@@ -7,6 +8,7 @@ else:
     # for python 3.10 and below, which doesnt have stdlib tomllib
     import tomli as tomllib
 
+from packaging.requirements import Requirement
 from packaging.specifiers import SpecifierSet
 from packaging.version import Version
 
@@ -94,6 +96,23 @@ def get_minimum_version(package_name: str, spec_string: str) -> Optional[str]:
     return str(min(valid_versions)) if valid_versions else None
 
 
+def _check_python_version_from_requirement(
+    requirement: Requirement, python_version: str
+) -> bool:
+    if not requirement.marker:
+        return True
+    else:
+        marker_str = str(requirement.marker)
+        if "python_version" or "python_full_version" in marker_str:
+            python_version_str = "".join(
+                char
+                for char in marker_str
+                if char.isdigit() or char in (".", "<", ">", "=", ",")
+            )
+            return check_python_version(python_version, python_version_str)
+        return True
+
+
 def get_min_version_from_toml(
     toml_path: str,
     versions_for: str,
@@ -105,8 +124,10 @@ def get_min_version_from_toml(
     with open(toml_path, "rb") as file:
         toml_data = tomllib.load(file)
 
-    # Get the dependencies from tool.poetry.dependencies
-    dependencies = toml_data["tool"]["poetry"]["dependencies"]
+    dependencies = defaultdict(list)
+    for dep in toml_data["project"]["dependencies"]:
+        requirement = Requirement(dep)
+        dependencies[requirement.name].append(requirement)
 
     # Initialize a dictionary to store the minimum versions
     min_versions = {}
@@ -121,17 +142,11 @@ def get_min_version_from_toml(
         if lib in dependencies:
             if include and lib not in include:
                 continue
-            # Get the version string
-            version_string = dependencies[lib]
-
-            if isinstance(version_string, dict):
-                version_string = version_string["version"]
-            if isinstance(version_string, list):
-                version_string = [
-                    vs
-                    for vs in version_string
-                    if check_python_version(python_version, vs["python"])
-                ][0]["version"]
+            requirements = dependencies[lib]
+            for requirement in requirements:
+                if _check_python_version_from_requirement(requirement, python_version):
+                    version_string = str(requirement.specifier)
+                    break
 
             # Use parse_version to get the minimum supported version from version_string
             min_version = get_minimum_version(lib, version_string)
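A detail in the new `_check_python_version_from_requirement` helper: the condition `if "python_version" or "python_full_version" in marker_str:` parses as `("python_version") or ("python_full_version" in marker_str)`, and a non-empty string literal is always truthy, so the branch runs for every marker. Since `packaging` can evaluate markers against an explicit environment, a corrected sketch (assuming only Python-version markers matter here) could avoid hand-parsing the marker string entirely:

```python
from packaging.requirements import Requirement


def marker_allows_python(requirement: Requirement, python_version: str) -> bool:
    """Evaluate a requirement's environment marker for a given Python version."""
    if requirement.marker is None:
        return True
    # Marker.evaluate takes an environment dict that overrides defaults
    # derived from the running interpreter.
    return requirement.marker.evaluate({"python_version": python_version})


req = Requirement('numpy<2,>=1.22.4; python_version < "3.12"')
assert marker_allows_python(req, "3.11")
assert not marker_allows_python(req, "3.12")
```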
16 .github/workflows/_compile_integration_test.yml (vendored)

@@ -12,9 +12,6 @@ on:
         type: string
         description: "Python version to use"
 
-env:
-  POETRY_VERSION: "1.8.4"
-
 jobs:
   build:
     defaults:
@@ -22,25 +19,22 @@ jobs:
         working-directory: ${{ inputs.working-directory }}
     runs-on: ubuntu-latest
     timeout-minutes: 20
-    name: "poetry run pytest -m compile tests/integration_tests #${{ inputs.python-version }}"
+    name: "uv run pytest -m compile tests/integration_tests #${{ inputs.python-version }}"
     steps:
       - uses: actions/checkout@v4
 
-      - name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python ${{ inputs.python-version }} + uv
+        uses: "./.github/actions/uv_setup"
         with:
           python-version: ${{ inputs.python-version }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: compile-integration
 
       - name: Install integration dependencies
         shell: bash
-        run: poetry install --with=test_integration,test
+        run: uv sync --group test --group test_integration
 
       - name: Check integration tests compile
         shell: bash
-        run: poetry run pytest -m compile tests/integration_tests
+        run: uv run pytest -m compile tests/integration_tests
 
       - name: Ensure the tests did not create any additional files
         shell: bash
14 .github/workflows/_integration_test.yml (vendored)

@@ -11,9 +11,6 @@ on:
         type: string
         description: "Python version to use"
 
-env:
-  POETRY_VERSION: "1.8.4"
-
 jobs:
   build:
     defaults:
@@ -24,22 +21,19 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
-      - name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python ${{ inputs.python-version }} + uv
+        uses: "./.github/actions/uv_setup"
         with:
           python-version: ${{ inputs.python-version }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: core
 
       - name: Install dependencies
         shell: bash
-        run: poetry install --with test,test_integration
+        run: uv sync --group test --group test_integration
 
       - name: Install deps outside pyproject
         if: ${{ startsWith(inputs.working-directory, 'libs/community/') }}
         shell: bash
-        run: poetry run pip install "boto3<2" "google-cloud-aiplatform<2"
+        run: uv pip install "boto3<2" "google-cloud-aiplatform<2"
 
       - name: Run integration tests
         shell: bash
45 .github/workflows/_lint.yml (vendored)

@@ -13,7 +13,6 @@ on:
       description: "Python version to use"
 
 env:
-  POETRY_VERSION: "1.8.4"
   WORKDIR: ${{ inputs.working-directory == '' && '.' || inputs.working-directory }}
 
 # This env var allows us to get inline annotations when ruff has complaints.
@@ -27,25 +26,10 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
-      - name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python ${{ inputs.python-version }} + uv
+        uses: "./.github/actions/uv_setup"
         with:
           python-version: ${{ inputs.python-version }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: lint-with-extras
-
-      - name: Check Poetry File
-        shell: bash
-        working-directory: ${{ inputs.working-directory }}
-        run: |
-          poetry check
-
-      - name: Check lock file
-        shell: bash
-        working-directory: ${{ inputs.working-directory }}
-        run: |
-          poetry lock --check
 
       - name: Install dependencies
         # Also installs dev/lint/test/typing dependencies, to ensure we have
@@ -58,17 +42,7 @@ jobs:
         # It doesn't matter how you change it, any change will cause a cache-bust.
         working-directory: ${{ inputs.working-directory }}
         run: |
-          poetry install --with lint,typing
-
-      - name: Get .mypy_cache to speed up mypy
-        uses: actions/cache@v4
-        env:
-          SEGMENT_DOWNLOAD_TIMEOUT_MIN: "2"
-        with:
-          path: |
-            ${{ env.WORKDIR }}/.mypy_cache
-          key: mypy-lint-${{ runner.os }}-${{ runner.arch }}-py${{ inputs.python-version }}-${{ inputs.working-directory }}-${{ hashFiles(format('{0}/poetry.lock', inputs.working-directory)) }}
-
+          uv sync --group lint --group typing
 
       - name: Analysing the code with our lint
         working-directory: ${{ inputs.working-directory }}
@@ -87,21 +61,12 @@ jobs:
         if: ${{ ! startsWith(inputs.working-directory, 'libs/partners/') }}
         working-directory: ${{ inputs.working-directory }}
         run: |
-          poetry install --with test
+          uv sync --group test
       - name: Install unit+integration test dependencies
         if: ${{ startsWith(inputs.working-directory, 'libs/partners/') }}
         working-directory: ${{ inputs.working-directory }}
         run: |
-          poetry install --with test,test_integration
-
-      - name: Get .mypy_cache_test to speed up mypy
-        uses: actions/cache@v4
-        env:
-          SEGMENT_DOWNLOAD_TIMEOUT_MIN: "2"
-        with:
-          path: |
-            ${{ env.WORKDIR }}/.mypy_cache_test
-          key: mypy-test-${{ runner.os }}-${{ runner.arch }}-py${{ inputs.python-version }}-${{ inputs.working-directory }}-${{ hashFiles(format('{0}/poetry.lock', inputs.working-directory)) }}
+          uv sync --group test --group test_integration
 
       - name: Analysing the code with our lint
         working-directory: ${{ inputs.working-directory }}
64 .github/workflows/_release.yml (vendored)

@@ -21,7 +21,6 @@ on:
 
 env:
   PYTHON_VERSION: "3.11"
-  POETRY_VERSION: "1.8.4"
 
 jobs:
   build:
@@ -36,13 +35,10 @@ jobs:
     steps:
      - uses: actions/checkout@v4
 
-      - name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python + uv
+        uses: "./.github/actions/uv_setup"
        with:
          python-version: ${{ env.PYTHON_VERSION }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: release
 
      # We want to keep this build stage *separate* from the release stage,
      # so that there's no sharing of permissions between them.
@@ -56,7 +52,7 @@ jobs:
      #   > from the publish job.
      #   https://github.com/pypa/gh-action-pypi-publish#non-goals
      - name: Build project for distribution
-        run: poetry build
+        run: uv build
        working-directory: ${{ inputs.working-directory }}
 
      - name: Upload build
@@ -67,11 +63,18 @@ jobs:
 
      - name: Check Version
        id: check-version
-        shell: bash
+        shell: python
        working-directory: ${{ inputs.working-directory }}
        run: |
-          echo pkg-name="$(poetry version | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
-          echo version="$(poetry version --short)" >> $GITHUB_OUTPUT
+          import os
+          import tomllib
+          with open("pyproject.toml", "rb") as f:
+              data = tomllib.load(f)
+          pkg_name = data["project"]["name"]
+          version = data["project"]["version"]
+          with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+              f.write(f"pkg-name={pkg_name}\n")
+              f.write(f"version={version}\n")
 
  release-notes:
    needs:
      - build
@@ -184,13 +187,11 @@ jobs:
      #   - The package is published, and it breaks on the missing dependency when
      #     used in the real world.
 
-      - name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python + uv
+        uses: "./.github/actions/uv_setup"
        id: setup-python
        with:
          python-version: ${{ env.PYTHON_VERSION }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
 
      - uses: actions/download-artifact@v4
        with:
@@ -213,17 +214,18 @@ jobs:
      #   - attempt install again after 5 seconds if it fails because there is
      #     sometimes a delay in availability on test pypi
        run: |
-          poetry run pip install dist/*.whl
+          uv venv
+          uv run pip install dist/*.whl
 
          # Replace all dashes in the package name with underscores,
          # since that's how Python imports packages with dashes in the name.
          # also remove _official suffix
          IMPORT_NAME="$(echo "$PKG_NAME" | sed s/-/_/g | sed s/_official//g)"
 
-          poetry run python -c "import $IMPORT_NAME; print(dir($IMPORT_NAME))"
+          uv run python -c "import $IMPORT_NAME; print(dir($IMPORT_NAME))"
 
      - name: Import test dependencies
-        run: poetry install --with test --no-root
+        run: uv sync --group test
        working-directory: ${{ inputs.working-directory }}
 
      # Overwrite the local version of the package with the built version
@@ -234,7 +236,7 @@ jobs:
        env:
          PKG_NAME: ${{ needs.build.outputs.pkg-name }}
          VERSION: ${{ needs.build.outputs.version }}
        run: |
-          poetry run pip install dist/*.whl
+          uv run pip install dist/*.whl
 
      - name: Run unit tests
        run: make tests
@@ -243,15 +245,15 @@ jobs:
      - name: Check for prerelease versions
        working-directory: ${{ inputs.working-directory }}
        run: |
-          poetry run python $GITHUB_WORKSPACE/.github/scripts/check_prerelease_dependencies.py pyproject.toml
+          uv run python $GITHUB_WORKSPACE/.github/scripts/check_prerelease_dependencies.py pyproject.toml
 
      - name: Get minimum versions
        working-directory: ${{ inputs.working-directory }}
        id: min-version
        run: |
-          poetry run pip install packaging requests
-          python_version="$(poetry run python --version | awk '{print $2}')"
-          min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
+          uv run pip install packaging requests
+          python_version="$(uv run python --version | awk '{print $2}')"
+          min_versions="$(uv run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
          echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
          echo "min-versions=$min_versions"
@@ -260,12 +262,12 @@ jobs:
        env:
          MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
        run: |
-          poetry run pip install --force-reinstall $MIN_VERSIONS --editable .
+          uv run pip install --force-reinstall $MIN_VERSIONS --editable .
          make tests
        working-directory: ${{ inputs.working-directory }}
 
      - name: Import integration test dependencies
-        run: poetry install --with test,test_integration
+        run: uv sync --group test --group test_integration
        working-directory: ${{ inputs.working-directory }}
 
      - name: Run integration tests
@@ -331,13 +333,10 @@ jobs:
    steps:
      - uses: actions/checkout@v4
 
-      - name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python + uv
+        uses: "./.github/actions/uv_setup"
        with:
          python-version: ${{ env.PYTHON_VERSION }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: release
 
      - uses: actions/download-artifact@v4
        with:
@@ -373,13 +372,10 @@ jobs:
    steps:
      - uses: actions/checkout@v4
 
-      - name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python + uv
+        uses: "./.github/actions/uv_setup"
        with:
          python-version: ${{ env.PYTHON_VERSION }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: release
 
      - uses: actions/download-artifact@v4
        with:
.github/workflows/_test.yml
vendored
21
.github/workflows/_test.yml
vendored
@@ -12,9 +12,6 @@ on:
|
||||
type: string
|
||||
description: "Python version to use"
|
||||
|
||||
env:
|
||||
POETRY_VERSION: "1.8.4"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
defaults:
|
||||
@@ -26,17 +23,14 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
|
||||
uses: "./.github/actions/poetry_setup"
|
||||
- name: Set up Python ${{ inputs.python-version }} + uv
|
||||
uses: "./.github/actions/uv_setup"
|
||||
id: setup-python
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
poetry-version: ${{ env.POETRY_VERSION }}
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
cache-key: core
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: poetry install --with test
|
||||
run: uv sync --group test --dev
|
||||
|
||||
- name: Run core tests
|
||||
shell: bash
|
||||
@@ -48,9 +42,9 @@ jobs:
|
||||
id: min-version
|
||||
shell: bash
|
||||
run: |
|
||||
poetry run pip install packaging tomli requests
|
||||
python_version="$(poetry run python --version | awk '{print $2}')"
|
||||
min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request $python_version)"
|
||||
uv run pip install packaging tomli requests
|
||||
python_version="$(uv run python --version | awk '{print $2}')"
|
||||
min_versions="$(uv run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request $python_version)"
|
||||
echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
|
||||
echo "min-versions=$min_versions"
|
||||
|
||||
@@ -59,8 +53,7 @@ jobs:
|
||||
env:
|
||||
MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
|
||||
run: |
|
||||
poetry run pip install uv
|
||||
poetry run uv pip install $MIN_VERSIONS
|
||||
uv run pip install $MIN_VERSIONS
|
||||
make tests
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
|
||||
|
||||
14 .github/workflows/_test_pydantic.yml (vendored)

@@ -17,9 +17,6 @@ on:
         type: string
         description: "Pydantic version to test."
 
-env:
-  POETRY_VERSION: "1.8.4"
-
 jobs:
   build:
     defaults:
@@ -31,21 +28,18 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
-      - name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python ${{ inputs.python-version }} + uv
+        uses: "./.github/actions/uv_setup"
         with:
           python-version: ${{ inputs.python-version }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: core
 
       - name: Install dependencies
         shell: bash
-        run: poetry install --with test
+        run: uv sync --group test
 
       - name: Overwrite pydantic version
         shell: bash
-        run: poetry run pip install pydantic~=${{ inputs.pydantic-version }}
+        run: uv pip install pydantic~=${{ inputs.pydantic-version }}
 
       - name: Run core tests
         shell: bash
25 .github/workflows/_test_release.yml (vendored)

@@ -14,8 +14,7 @@ on:
       description: "Release from a non-master branch (danger!)"
 
 env:
-  POETRY_VERSION: "1.8.4"
-  PYTHON_VERSION: "3.10"
+  PYTHON_VERSION: "3.11"
 
 jobs:
   build:
@@ -29,13 +28,10 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
-      - name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
-        uses: "./.github/actions/poetry_setup"
+      - name: Set up Python + uv
+        uses: "./.github/actions/uv_setup"
         with:
           python-version: ${{ env.PYTHON_VERSION }}
-          poetry-version: ${{ env.POETRY_VERSION }}
-          working-directory: ${{ inputs.working-directory }}
-          cache-key: release
 
       # We want to keep this build stage *separate* from the release stage,
       # so that there's no sharing of permissions between them.
@@ -49,7 +45,7 @@ jobs:
      #   > from the publish job.
      #   https://github.com/pypa/gh-action-pypi-publish#non-goals
      - name: Build project for distribution
-        run: poetry build
+        run: uv build
        working-directory: ${{ inputs.working-directory }}
 
      - name: Upload build
@@ -60,11 +56,18 @@ jobs:
 
      - name: Check Version
        id: check-version
-        shell: bash
+        shell: python
        working-directory: ${{ inputs.working-directory }}
        run: |
-          echo pkg-name="$(poetry version | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
-          echo version="$(poetry version --short)" >> $GITHUB_OUTPUT
+          import os
+          import tomllib
+          with open("pyproject.toml", "rb") as f:
+              data = tomllib.load(f)
+          pkg_name = data["project"]["name"]
+          version = data["project"]["version"]
+          with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+              f.write(f"pkg-name={pkg_name}\n")
+              f.write(f"version={version}\n")
 
  publish:
    needs:
.github/workflows/check_diffs.yml
vendored
17
.github/workflows/check_diffs.yml
vendored
@@ -17,9 +17,6 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
POETRY_VERSION: "1.8.4"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -127,21 +124,17 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python ${{ matrix.job-configs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
|
||||
uses: "./.github/actions/poetry_setup"
|
||||
- name: Set up Python ${{ matrix.job-configs.python-version }} + uv
|
||||
uses: "./.github/actions/uv_setup"
|
||||
with:
|
||||
python-version: ${{ matrix.job-configs.python-version }}
|
||||
poetry-version: ${{ env.POETRY_VERSION }}
|
||||
working-directory: ${{ matrix.job-configs.working-directory }}
|
||||
cache-key: extended
|
||||
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Running extended tests, installing dependencies with poetry..."
|
||||
poetry install --with test
|
||||
poetry run pip install uv
|
||||
poetry run uv pip install -r extended_testing_deps.txt
|
||||
echo "Running extended tests, installing dependencies with uv..."
|
||||
uv sync --group test
|
||||
uv pip install -r extended_testing_deps.txt
|
||||
|
||||
- name: Run extended tests
|
||||
run: make extended_tests
|
||||
|
||||
3 .github/workflows/scheduled_test.yml (vendored)

@@ -14,7 +14,7 @@ on:
 
 env:
   POETRY_VERSION: "1.8.4"
-  DEFAULT_LIBS: '["libs/partners/openai", "libs/partners/anthropic", "libs/partners/fireworks", "libs/partners/groq", "libs/partners/mistralai", "libs/partners/google-vertexai", "libs/partners/google-genai", "libs/partners/aws"]'
+  DEFAULT_LIBS: '["libs/partners/openai", "libs/partners/anthropic", "libs/partners/fireworks", "libs/partners/groq", "libs/partners/mistralai", "libs/partners/deepseek", "libs/partners/google-vertexai", "libs/partners/google-genai", "libs/partners/aws"]'
 
 jobs:
   compute-matrix:
@@ -117,6 +117,7 @@ jobs:
       AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LEGACY_CHAT_DEPLOYMENT_NAME }}
       AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }}
       AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }}
+      DEEPSEEK_API_KEY: ${{ secrets.DEEPSEEK_API_KEY }}
       FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
       GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
       HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
docs/docs/integrations/chat/goodfire.ipynb
Normal file
354
docs/docs/integrations/chat/goodfire.ipynb
Normal file
@@ -0,0 +1,354 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"id": "afaf8039",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"---\n",
|
||||
"sidebar_label: Goodfire\n",
|
||||
"---"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "e49f1e0d",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# ChatGoodfire\n",
|
||||
"\n",
|
||||
"This will help you getting started with Goodfire [chat models](/docs/concepts/chat_models). For detailed documentation of all ChatGoodfire features and configurations head to the [PyPI project page](https://pypi.org/project/langchain-goodfire/), or go directly to the [Goodfire SDK docs](https://docs.goodfire.ai/sdk-reference/example). All of the Goodfire-specific functionality (e.g. SAE features, variants, etc.) is available via the main `goodfire` package. This integration is a wrapper around the Goodfire SDK.\n",
|
||||
"\n",
|
||||
"## Overview\n",
|
||||
"### Integration details\n",
|
||||
"\n",
|
||||
"| Class | Package | Local | Serializable | JS support | Package downloads | Package latest |\n",
|
||||
"| :--- | :--- | :---: | :---: | :---: | :---: | :---: |\n",
|
||||
"| [ChatGoodfire](https://python.langchain.com/api_reference/goodfire/chat_models/langchain_goodfire.chat_models.ChatGoodfire.html) | [langchain-goodfire](https://python.langchain.com/api_reference/goodfire/) | ❌ | ❌ | ❌ |  |  |\n",
|
||||
"\n",
|
||||
"### Model features\n",
|
||||
"| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n",
|
||||
"| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n",
|
||||
"| ❌ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | ✅ | ✅ | ❌ | \n",
|
||||
"\n",
|
||||
"## Setup\n",
|
||||
"\n",
|
||||
"To access Goodfire models you'll need to create a/an Goodfire account, get an API key, and install the `langchain-goodfire` integration package.\n",
|
||||
"\n",
|
||||
"### Credentials\n",
|
||||
"\n",
|
||||
"Head to [Goodfire Settings](https://platform.goodfire.ai/organization/settings/api-keys) to sign up to Goodfire and generate an API key. Once you've done this set the GOODFIRE_API_KEY environment variable."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "433e8d2b-9519-4b49-b2c4-7ab65b046c94",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import getpass\n",
|
||||
"import os\n",
|
||||
"\n",
|
||||
"if not os.getenv(\"GOODFIRE_API_KEY\"):\n",
|
||||
" os.environ[\"GOODFIRE_API_KEY\"] = getpass.getpass(\"Enter your Goodfire API key: \")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "72ee0c4b-9764-423a-9dbf-95129e185210",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"If you want to get automated tracing of your model calls you can also set your [LangSmith](https://docs.smith.langchain.com/) API key by uncommenting below:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "a15d341e-3e26-4ca3-830b-5aab30ed66de",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# os.environ[\"LANGSMITH_TRACING\"] = \"true\"\n",
|
||||
"# os.environ[\"LANGSMITH_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "0730d6a1-c893-4840-9817-5e5251676d5d",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Installation\n",
|
||||
"\n",
|
||||
"The LangChain Goodfire integration lives in the `langchain-goodfire` package:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "652d6238-1f87-422a-b135-f5abbb8652fc",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Note: you may need to restart the kernel to use updated packages.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"%pip install -qU langchain-goodfire"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "a38cde65-254d-4219-a441-068766c0d4b5",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Instantiation\n",
|
||||
"\n",
|
||||
"Now we can instantiate our model object and generate chat completions:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import goodfire\n",
|
||||
"from langchain_goodfire import ChatGoodfire\n",
|
||||
"\n",
|
||||
"base_variant = goodfire.Variant(\"meta-llama/Llama-3.3-70B-Instruct\")\n",
|
||||
"\n",
|
||||
"llm = ChatGoodfire(\n",
|
||||
" model=base_variant,\n",
|
||||
" temperature=0,\n",
|
||||
" max_completion_tokens=1000,\n",
|
||||
" seed=42,\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "2b4f3e15",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Invocation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "62e0dbc3",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content=\"J'adore la programmation.\", additional_kwargs={}, response_metadata={}, id='run-8d43cf35-bce8-4827-8935-c64f8fb78cd0-0', usage_metadata={'input_tokens': 51, 'output_tokens': 39, 'total_tokens': 90})"
|
||||
]
|
||||
},
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"messages = [\n",
|
||||
" (\n",
|
||||
" \"system\",\n",
|
||||
" \"You are a helpful assistant that translates English to French. Translate the user sentence.\",\n",
|
||||
" ),\n",
|
||||
" (\"human\", \"I love programming.\"),\n",
|
||||
"]\n",
|
||||
"ai_msg = await llm.ainvoke(messages)\n",
|
||||
"ai_msg"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "d86145b3-bfef-46e8-b227-4dda5c9c2705",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"J'adore la programmation.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(ai_msg.content)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "18e2bfc0-7e78-4528-a73f-499ac150dca8",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Chaining\n",
|
||||
"\n",
|
||||
"We can [chain](/docs/how_to/sequence/) our model with a prompt template like so:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content='Ich liebe das Programmieren. How can I help you with programming today?', additional_kwargs={}, response_metadata={}, id='run-03d1a585-8234-46f1-a8df-bf9143fe3309-0', usage_metadata={'input_tokens': 46, 'output_tokens': 46, 'total_tokens': 92})"
|
||||
]
|
||||
},
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"\n",
|
||||
"prompt = ChatPromptTemplate(\n",
|
||||
" [\n",
|
||||
" (\n",
|
||||
" \"system\",\n",
|
||||
" \"You are a helpful assistant that translates {input_language} to {output_language}.\",\n",
|
||||
" ),\n",
|
||||
" (\"human\", \"{input}\"),\n",
|
||||
" ]\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"chain = prompt | llm\n",
|
||||
"await chain.ainvoke(\n",
|
||||
" {\n",
|
||||
" \"input_language\": \"English\",\n",
|
||||
" \"output_language\": \"German\",\n",
|
||||
" \"input\": \"I love programming.\",\n",
|
||||
" }\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "d1ee55bc-ffc8-4cfa-801c-993953a08cfd",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Goodfire-specific functionality\n",
|
||||
"\n",
|
||||
"To use Goodfire-specific functionality such as SAE features and variants, you can use the `goodfire` package directly."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "3aef9e0a",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"FeatureGroup([\n",
|
||||
" 0: \"The assistant should adopt the persona of a pirate\",\n",
|
||||
" 1: \"The assistant should roleplay as a pirate\",\n",
|
||||
" 2: \"The assistant should engage with pirate-themed content or roleplay as a pirate\",\n",
|
||||
" 3: \"The assistant should roleplay as a character\",\n",
|
||||
" 4: \"The assistant should roleplay as a specific character\",\n",
|
||||
" 5: \"The assistant should roleplay as a game character or NPC\",\n",
|
||||
" 6: \"The assistant should roleplay as a human character\",\n",
|
||||
" 7: \"Requests for the assistant to roleplay or pretend to be something else\",\n",
|
||||
" 8: \"Requests for the assistant to roleplay or pretend to be something\",\n",
|
||||
" 9: \"The assistant is being assigned a role or persona to roleplay\"\n",
|
||||
"])"
|
||||
]
|
||||
},
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"client = goodfire.Client(api_key=os.environ[\"GOODFIRE_API_KEY\"])\n",
|
||||
"\n",
|
||||
"pirate_features = client.features.search(\n",
|
||||
" \"assistant should roleplay as a pirate\", base_variant\n",
|
||||
")\n",
|
||||
"pirate_features"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "52f03a00",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content='Why did the scarecrow win an award? Because he was outstanding in his field! Arrr! Hope that made ye laugh, matey!', additional_kwargs={}, response_metadata={}, id='run-7d8bd30f-7f80-41cb-bdb6-25c29c22a7ce-0', usage_metadata={'input_tokens': 35, 'output_tokens': 60, 'total_tokens': 95})"
|
||||
]
|
||||
},
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"pirate_variant = goodfire.Variant(\"meta-llama/Llama-3.3-70B-Instruct\")\n",
|
||||
"\n",
|
||||
"pirate_variant.set(pirate_features[0], 0.4)\n",
|
||||
"pirate_variant.set(pirate_features[1], 0.3)\n",
|
||||
"\n",
|
||||
"await llm.ainvoke(\"Tell me a joke\", model=pirate_variant)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## API reference\n",
|
||||
"\n",
|
||||
"For detailed documentation of all ChatGoodfire features and configurations head to the [API reference](https://python.langchain.com/api_reference/goodfire/chat_models/langchain_goodfire.chat_models.ChatGoodfire.html)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": ".venv",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.8"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
14 docs/docs/integrations/providers/goodfire.mdx (new file)

@@ -0,0 +1,14 @@
# Goodfire

[Goodfire](https://www.goodfire.ai/) is a research lab focused on AI safety and
interpretability.

## Installation and Setup

```bash
pip install langchain-goodfire
```

## Chat models

See detail on available chat models [here](/docs/integrations/chat/goodfire).
@@ -1,4 +1,3 @@
-
 # PaymanAI
 
 PaymanAI provides functionality to send and receive payments (fiat and crypto) on behalf of an AI Agent. To get started:
@@ -24,16 +23,16 @@ These can be wrapped as **LangChain Tools** for an LLM-based agent to call them
 
 | Class | Package | Serializable | JS support | Package latest |
 | :--- | :--- | :---: | :---: | :--- |
-| PaymanAI | `langchain_community` | ❌ | ❌ | [PyPI Version] |
+| PaymanAI | `langchain-payman-tool` | ❌ | ❌ | [PyPI Version] |
 
 If you're simply calling the PaymanAI SDK, you can do it directly or via the **Tool** interface in LangChain.
 
 ## Setup
 
-1. **Install** the `langchain-community` (or equivalent) package:
+1. **Install** the PaymanAI tool package:
 
    ```bash
-   pip install --quiet -U langchain-community
+   pip install langchain-payman-tool
   ```
 
 2. **Install** the PaymanAI SDK:
@@ -54,7 +53,7 @@ Your `PAYMAN_API_SECRET` should be the secret key from app.paymanai.com. The `PA
 Here is an example of instantiating a PaymanAI tool. If you have multiple Payman methods, you can create multiple tools.
 
 ```python
-from langchain_community.tools.langchain_payman_tool.tool import PaymanAI
+from langchain_payman_tool.tool import PaymanAI
 
 # Instantiate the PaymanAI tool (example)
 tool = PaymanAI(
@@ -104,7 +103,7 @@ You can bind a PaymanAI tool to a LangChain agent or chain that supports tool-ca
 1. **Sign up** at app.paymanai.com to get your **API Key**.
 2. **Install** dependencies:
    ```bash
-   pip install paymanai langchain-community
+   pip install paymanai langchain-payman-tool
   ```
 3. **Export** environment variables:
   ```bash
@@ -112,4 +111,4 @@ You can bind a PaymanAI tool to a LangChain agent or chain that supports tool-ca
    export PAYMAN_ENVIRONMENT="sandbox"
    ```
 4. **Instantiate** a PaymanAI tool, passing your desired name/description.
-5. **Call** the tool with `.invoke(...)` or integrate it into a chain or agent.
+5. **Call** the tool with `.invoke(...)` or integrate it into a chain or agent.
@@ -473,71 +473,78 @@ class AGEGraph(GraphStore):
     @staticmethod
     def _wrap_query(query: str, graph_name: str) -> str:
         """
-        Convert a cypher query to an Apache Age compatible
-        sql query by wrapping the cypher query in ag_catalog.cypher,
-        casting results to agtype and building a select statement
+        Convert a Cyper query to an Apache Age compatible Sql Query.
+        Handles combined queries with UNION/EXCEPT operators
 
         Args:
-            query (str): a valid cypher query
-            graph_name (str): the name of the graph to query
+            query (str) : A valid cypher query, can include UNION/EXCEPT operators
+            graph_name (str) : The name of the graph to query
 
-        Returns:
-            str: an equivalent pgsql query
+        Returns :
+            str : An equivalent pgSql query wrapped with ag_catalog.cypher
+
+        Raises:
+            ValueError : If query is empty, contain RETURN *, or has invalid field names
         """
 
+        if not query.strip():
+            raise ValueError("Empty query provided")
+
         # pgsql template
         template = """SELECT {projection} FROM ag_catalog.cypher('{graph_name}', $$
             {query}
         $$) AS ({fields});"""
 
-        # if there are any returned fields they must be added to the pgsql query
-        return_match = re.search(r'\breturn\b(?![^"]*")', query, re.IGNORECASE)
-        if return_match:
-            # Extract the part of the query after the RETURN keyword
-            return_clause = query[return_match.end() :]
+        # split the query into parts based on UNION and EXCEPT
+        parts = re.split(r"\b(UNION\b|\bEXCEPT)\b", query, flags=re.IGNORECASE)
 
-            # parse return statement to identify returned fields
-            fields = (
-                return_clause.lower()
-                .split("distinct")[-1]
-                .split("order by")[0]
-                .split("skip")[0]
-                .split("limit")[0]
-                .split(",")
-            )
+        all_fields = []
 
-            # raise exception if RETURN * is found as we can't resolve the fields
-            if "*" in [x.strip() for x in fields]:
-                raise ValueError(
-                    "AGE graph does not support 'RETURN *'"
-                    + " statements in Cypher queries"
-                )
+        for part in parts:
+            if part.strip().upper() in ("UNION", "EXCEPT"):
+                continue
+
+            # if there are any returned fields they must be added to the pgsql query
+            return_match = re.search(r'\breturn\b(?![^"]*")', part, re.IGNORECASE)
+            if return_match:
+                # Extract the part of the query after the RETURN keyword
+                return_clause = part[return_match.end() :]
+
+                # parse return statement to identify returned fields
+                fields = (
+                    return_clause.lower()
+                    .split("distinct")[-1]
+                    .split("order by")[0]
+                    .split("skip")[0]
+                    .split("limit")[0]
+                    .split(",")
+                )
 
-            # get pgsql formatted field names
-            fields = [
-                AGEGraph._get_col_name(field, idx) for idx, field in enumerate(fields)
-            ]
+                # raise exception if RETURN * is found as we can't resolve the fields
+                clean_fileds = [f.strip() for f in fields if f.strip()]
+                if "*" in clean_fileds:
+                    raise ValueError(
+                        "Apache Age does not support RETURN * in Cypher queries"
+                    )
 
-            # build resulting pgsql relation
-            fields_str = ", ".join(
-                [
-                    field.split(".")[-1] + " agtype"
-                    for field in fields
-                    if field.split(".")[-1]
-                ]
-            )
+                # Format fields and maintain order of appearance
+                for idx, field in enumerate(clean_fileds):
+                    field_name = AGEGraph._get_col_name(field, idx)
+                    if field_name not in all_fields:
+                        all_fields.append(field_name)
 
-        # if no return statement we still need to return a single field of type agtype
-        else:
+        # if no return statements found in any part
+        if not all_fields:
             fields_str = "a agtype"
-
-        select_str = "*"
+        else:
+            fields_str = ", ".join(f"{field} agtype" for field in all_fields)
 
         return template.format(
             graph_name=graph_name,
             query=query,
             fields=fields_str,
-            projection=select_str,
+            projection="*",
         )
 
     @staticmethod
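For context on what `_wrap_query` builds: the Cypher text is embedded in a dollar-quoted `ag_catalog.cypher(...)` call and every collected field becomes an `agtype` column in the `AS (...)` clause. A rough sketch of the output shape using the template from the diff (graph name, query, and column list are illustrative; real column names come from `_get_col_name`):

```python
# Shape of the SQL generated for a UNION query (illustrative values).
template = """SELECT {projection} FROM ag_catalog.cypher('{graph_name}', $$
    {query}
$$) AS ({fields});"""

print(
    template.format(
        projection="*",
        graph_name="my_graph",
        query="MATCH (a:Person) RETURN a.name UNION MATCH (b:Company) RETURN b.name",
        fields="name agtype",
    )
)
```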
@@ -1,15 +1,6 @@
|
||||
[build-system]
|
||||
requires = ["poetry-core>=1.0.0"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.poetry]
|
||||
name = "langchain-community"
|
||||
version = "0.3.16"
|
||||
description = "Community contributed LangChain integrations."
|
||||
authors = []
|
||||
license = "MIT"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/langchain-ai/langchain"
|
||||
requires = ["pdm-backend"]
|
||||
build-backend = "pdm.backend"
|
||||
|
||||
[tool.ruff]
|
||||
exclude = [
|
||||
@@ -27,31 +18,6 @@ skip = ".git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package
|
||||
ignore-regex = ".*(Stati Uniti|Tense=Pres).*"
|
||||
ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin,cann"
|
||||
|
||||
[tool.poetry.urls]
|
||||
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/community"
|
||||
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-community%3D%3D0%22&expanded=true"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.9,<4.0"
|
||||
langchain-core = "^0.3.32"
|
||||
langchain = "^0.3.16"
|
||||
SQLAlchemy = ">=1.4,<3"
|
||||
requests = "^2"
|
||||
PyYAML = ">=5.3"
|
||||
aiohttp = "^3.8.3"
|
||||
tenacity = ">=8.1.0,!=8.4.0,<10"
|
||||
dataclasses-json = ">= 0.5.7, < 0.7"
|
||||
pydantic-settings = "^2.4.0"
|
||||
langsmith = ">=0.1.125,<0.4"
|
||||
httpx-sse = "^0.4.0"
|
||||
[[tool.poetry.dependencies.numpy]]
|
||||
version = ">=1.22.4,<2"
|
||||
python = "<3.12"
|
||||
|
||||
[[tool.poetry.dependencies.numpy]]
|
||||
version = ">=1.26.2,<3"
|
||||
python = ">=3.12"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ["E", "F", "I", "T201"]
|
||||
|
||||
@@ -72,100 +38,142 @@ filterwarnings = [
|
||||
"ignore::langchain_core._api.deprecation.LangChainPendingDeprecationWarning:test",
|
||||
]
|
||||
|
||||
[tool.poetry.group.test]
|
||||
optional = true
|
||||
[tool.pdm.dev-dependencies]
|
||||
test = [
|
||||
"pytest<8.0.0,>=7.4.4",
|
||||
"pytest-cov<5.0.0,>=4.1.0",
|
||||
"pytest-dotenv<1.0.0,>=0.5.2",
|
||||
"duckdb-engine<1.0.0,>=0.13.6",
|
||||
"pytest-watcher<1.0.0,>=0.2.6",
|
||||
"freezegun<2.0.0,>=1.2.2",
|
||||
"responses<1.0.0,>=0.22.0",
|
||||
"pytest-asyncio<1.0.0,>=0.20.3",
|
||||
"lark<2.0.0,>=1.1.5",
|
||||
"pandas<3.0.0,>=2.0.0",
|
||||
"pytest-mock<4.0.0,>=3.10.0",
|
||||
"pytest-socket<1.0.0,>=0.6.0",
|
||||
"syrupy<5.0.0,>=4.0.2",
|
||||
"requests-mock<2.0.0,>=1.11.0",
|
||||
"pytest-xdist<4.0.0,>=3.6.1",
|
||||
"cffi<1.17.1; python_version < \"3.10\"",
|
||||
"cffi; python_version >= \"3.10\"",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
test_integration = [
|
||||
"pytest-vcr<2.0.0,>=1.0.2",
|
||||
"vcrpy<7,>=6",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
"cffi<1.17.1; python_version < \"3.10\"",
|
||||
"cffi; python_version >= \"3.10\"",
|
||||
]
|
||||
dev = [
|
||||
"jupyter<2.0.0,>=1.0.0",
|
||||
"setuptools<68.0.0,>=67.6.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.12",
|
||||
"types-pyyaml<7.0.0.0,>=6.0.12.2",
|
||||
"types-requests<3.0.0.0,>=2.28.11.5",
|
||||
"types-toml<1.0.0.0,>=0.10.8.1",
|
||||
"types-pytz<2024.0.0.0,>=2023.3.0.0",
|
||||
"types-chardet<6.0.0.0,>=5.0.4.6",
|
||||
"types-redis<5.0.0.0,>=4.3.21.6",
|
||||
"mypy-protobuf<4.0.0,>=3.0.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
]
|
||||
|
||||
[tool.poetry.group.codespell]
|
||||
optional = true
|
||||
[tool.pdm.build]
|
||||
includes = []
|
||||
|
||||
[tool.poetry.group.test_integration]
|
||||
optional = true
|
||||
[project]
|
||||
authors = []
|
||||
license = {text = "MIT"}
|
||||
requires-python = "<4.0,>=3.9"
|
||||
dependencies = [
|
||||
"langchain-core<1.0.0,>=0.3.32",
|
||||
"langchain<1.0.0,>=0.3.16",
|
||||
"SQLAlchemy<3,>=1.4",
|
||||
"requests<3,>=2",
|
||||
"PyYAML>=5.3",
|
||||
"aiohttp<4.0.0,>=3.8.3",
|
||||
"tenacity!=8.4.0,<10,>=8.1.0",
|
||||
"dataclasses-json<0.7,>=0.5.7",
|
||||
"pydantic-settings<3.0.0,>=2.4.0",
|
||||
"langsmith<0.4,>=0.1.125",
|
||||
"httpx-sse<1.0.0,>=0.4.0",
|
||||
"numpy<2,>=1.22.4; python_version < \"3.12\"",
|
||||
"numpy<3,>=1.26.2; python_version >= \"3.12\"",
|
||||
]
|
||||
name = "langchain-community"
|
||||
version = "0.3.16"
|
||||
description = "Community contributed LangChain integrations."
|
||||
readme = "README.md"
|
||||
|
||||
[tool.poetry.group.lint]
|
||||
optional = true
|
||||
[project.urls]
|
||||
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/community"
|
||||
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-community%3D%3D0%22&expanded=true"
|
||||
repository = "https://github.com/langchain-ai/langchain"
|
||||
|
||||
[tool.poetry.group.dev]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.test.dependencies]
|
||||
pytest = "^7.4.4"
|
||||
pytest-cov = "^4.1.0"
|
||||
pytest-dotenv = "^0.5.2"
|
||||
duckdb-engine = "^0.13.6"
|
||||
pytest-watcher = "^0.2.6"
|
||||
freezegun = "^1.2.2"
|
||||
responses = "^0.22.0"
|
||||
pytest-asyncio = "^0.20.3"
|
||||
lark = "^1.1.5"
|
||||
pandas = "^2.0.0"
|
||||
pytest-mock = "^3.10.0"
|
||||
pytest-socket = "^0.6.0"
|
||||
syrupy = "^4.0.2"
|
||||
requests-mock = "^1.11.0"
|
||||
pytest-xdist = "^3.6.1"
|
||||
[[tool.poetry.group.test.dependencies.cffi]]
|
||||
version = "<1.17.1"
|
||||
python = "<3.10"
|
||||
|
||||
[[tool.poetry.group.test.dependencies.cffi]]
|
||||
version = "*"
|
||||
python = ">=3.10"
|
||||
|
||||
[tool.poetry.group.codespell.dependencies]
|
||||
codespell = "^2.2.0"
|
||||
|
||||
[tool.poetry.group.test_integration.dependencies]
|
||||
pytest-vcr = "^1.0.2"
|
||||
vcrpy = "^6"
|
||||
|
||||
[tool.poetry.group.lint.dependencies]
|
||||
ruff = "^0.5"
|
||||
[[tool.poetry.group.lint.dependencies.cffi]]
|
||||
version = "<1.17.1"
|
||||
python = "<3.10"
|
||||
|
||||
[[tool.poetry.group.lint.dependencies.cffi]]
|
||||
version = "*"
|
||||
python = ">=3.10"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
jupyter = "^1.0.0"
|
||||
setuptools = "^67.6.1"
|
||||
|
||||
[tool.poetry.group.typing.dependencies]
|
||||
mypy = "^1.12"
|
||||
types-pyyaml = "^6.0.12.2"
|
||||
types-requests = "^2.28.11.5"
|
||||
types-toml = "^0.10.8.1"
|
||||
types-pytz = "^2023.3.0.0"
|
||||
types-chardet = "^5.0.4.6"
|
||||
types-redis = "^4.3.21.6"
|
||||
mypy-protobuf = "^3.0.0"
|
||||
|
||||
[tool.poetry.group.test.dependencies.langchain-core]
|
||||
path = "../core"
|
||||
develop = true
|
||||
|
||||
[tool.poetry.group.test.dependencies.langchain]
|
||||
path = "../langchain"
|
||||
develop = true
|
||||
|
||||
[tool.poetry.group.test.dependencies.langchain-tests]
|
||||
path = "../standard-tests"
|
||||
develop = true
|
||||
|
||||
[tool.poetry.group.dev.dependencies.langchain-core]
|
||||
path = "../core"
|
||||
develop = true
|
||||
|
||||
[tool.poetry.group.typing.dependencies.langchain-core]
|
||||
path = "../core"
|
||||
develop = true
|
||||
|
||||
[tool.poetry.group.typing.dependencies.langchain-text-splitters]
|
||||
path = "../text-splitters"
|
||||
develop = true
|
||||
|
||||
[tool.poetry.group.typing.dependencies.langchain]
|
||||
path = "../langchain"
|
||||
develop = true
|
||||
[dependency-groups]
|
||||
test = [
|
||||
"pytest<8.0.0,>=7.4.4",
|
||||
"pytest-cov<5.0.0,>=4.1.0",
|
||||
"pytest-dotenv<1.0.0,>=0.5.2",
|
||||
"duckdb-engine<1.0.0,>=0.13.6",
|
||||
"pytest-watcher<1.0.0,>=0.2.6",
|
||||
"freezegun<2.0.0,>=1.2.2",
|
||||
"responses<1.0.0,>=0.22.0",
|
||||
"pytest-asyncio<1.0.0,>=0.20.3",
|
||||
"lark<2.0.0,>=1.1.5",
|
||||
"pandas<3.0.0,>=2.0.0",
|
||||
"pytest-mock<4.0.0,>=3.10.0",
|
||||
"pytest-socket<1.0.0,>=0.6.0",
|
||||
"syrupy<5.0.0,>=4.0.2",
|
||||
"requests-mock<2.0.0,>=1.11.0",
|
||||
"pytest-xdist<4.0.0,>=3.6.1",
|
||||
"cffi<1.17.1; python_version < \"3.10\"",
|
||||
"cffi; python_version >= \"3.10\"",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
"langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
|
||||
]
|
||||
codespell = [
|
||||
"codespell<3.0.0,>=2.2.0",
|
||||
]
|
||||
test_integration = [
|
||||
"pytest-vcr<2.0.0,>=1.0.2",
|
||||
"vcrpy<7,>=6",
|
||||
]
|
||||
lint = [
|
||||
"ruff<1.0,>=0.5",
|
||||
"cffi<1.17.1; python_version < \"3.10\"",
|
||||
"cffi; python_version >= \"3.10\"",
|
||||
]
|
||||
dev = [
|
||||
"jupyter<2.0.0,>=1.0.0",
|
||||
"setuptools<68.0.0,>=67.6.1",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
]
|
||||
typing = [
|
||||
"mypy<2.0,>=1.12",
|
||||
"types-pyyaml<7.0.0.0,>=6.0.12.2",
|
||||
"types-requests<3.0.0.0,>=2.28.11.5",
|
||||
"types-toml<1.0.0.0,>=0.10.8.1",
|
||||
"types-pytz<2024.0.0.0,>=2023.3.0.0",
|
||||
"types-chardet<6.0.0.0,>=5.0.4.6",
|
||||
"types-redis<5.0.0.0,>=4.3.21.6",
|
||||
"mypy-protobuf<4.0.0,>=3.0.0",
|
||||
"langchain-core @ file:///${PROJECT_ROOT}/../core",
|
||||
"langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
|
||||
"langchain @ file:///${PROJECT_ROOT}/../langchain",
|
||||
]
|
||||
|
||||
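The `[dependency-groups]` table that replaces the poetry groups above is the PEP 735 format that uv consumes, and the `file:///${PROJECT_ROOT}/...` entries are pdm/uv-style path references to sibling packages in the monorepo. As a minimal sketch of how these groups can be read back programmatically (assuming Python 3.11+ for `tomllib`; the file path is illustrative):

    import tomllib
    from packaging.requirements import Requirement

    # Minimal sketch: print the bare package name behind each
    # dependency-group entry, including the `name @ file://...` references,
    # which parse as ordinary PEP 508 requirements.
    with open("libs/community/pyproject.toml", "rb") as f:
        pyproject = tomllib.load(f)

    for group, deps in pyproject["dependency-groups"].items():
        print(group, "->", [Requirement(dep).name for dep in deps])
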
@@ -0,0 +1,33 @@
"""Standard LangChain interface tests"""

from typing import Type

import pytest
from langchain_core.language_models import BaseChatModel
from langchain_tests.integration_tests import ChatModelIntegrationTests

from langchain_community.chat_models import ChatPerplexity


class TestPerplexityStandard(ChatModelIntegrationTests):
    @property
    def chat_model_class(self) -> Type[BaseChatModel]:
        return ChatPerplexity

    @property
    def chat_model_params(self) -> dict:
        return {"model": "sonar"}

    @property
    def returns_usage_metadata(self) -> bool:
        # TODO: add usage metadata and delete this property
        # https://docs.perplexity.ai/api-reference/chat-completions#response-usage
        return False

    @pytest.mark.xfail(reason="TODO: handle in integration.")
    def test_double_messages_conversation(self, model: BaseChatModel) -> None:
        super().test_double_messages_conversation(model)

    @pytest.mark.xfail(reason="Raises 400: Custom stop words not supported.")
    def test_stop_sequence(self, model: BaseChatModel) -> None:
        super().test_stop_sequence(model)
@@ -1,11 +1,13 @@
"""Test Perplexity Chat API wrapper."""

import os
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Tuple, Type
from unittest.mock import MagicMock

import pytest
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import AIMessageChunk, BaseMessageChunk
from langchain_tests.unit_tests import ChatModelUnitTests
from pytest_mock import MockerFixture

from langchain_community.chat_models import ChatPerplexity
@@ -13,6 +15,21 @@ from langchain_community.chat_models import ChatPerplexity
os.environ["PPLX_API_KEY"] = "foo"


@pytest.mark.requires("openai")
class TestPerplexityStandard(ChatModelUnitTests):
    @property
    def chat_model_class(self) -> Type[BaseChatModel]:
        return ChatPerplexity

    @property
    def init_from_env_params(self) -> Tuple[dict, dict, dict]:
        return (
            {"PPLX_API_KEY": "api_key"},
            {},
            {"pplx_api_key": "api_key"},
        )


@pytest.mark.requires("openai")
def test_perplexity_model_name_param() -> None:
    llm = ChatPerplexity(model="foo")  # type: ignore[call-arg]
@@ -53,6 +53,7 @@ class TestAGEGraph(unittest.TestCase):
            self.assertEqual(AGEGraph._get_col_name(*value), expected[idx])

    def test_wrap_query(self) -> None:
        """Test basic query wrapping functionality."""
        inputs = [
            # Positive case: Simple return clause
            """
@@ -76,46 +77,195 @@ class TestAGEGraph(unittest.TestCase):

        expected = [
            # Expected output for the first positive case
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (keanu:Person {name:'Keanu Reeves'})
            RETURN keanu.name AS name, keanu.born AS born
            $$) AS (name agtype, born agtype);
            """,
            # Second test case (no RETURN clause)
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MERGE (n:a {id: 1})
            $$) AS (a agtype);
            """,
            # Expected output for the negative cases (no RETURN clause)
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (n {description: "This will return a value"})
            MERGE (n)-[:RELATED]->(m)
            $$) AS (a agtype);
            """,
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (n {returnValue: "some value"})
            MERGE (n)-[:RELATED]->(m)
            $$) AS (a agtype);
            """,
        ]

        for idx, value in enumerate(inputs):
            result = AGEGraph._wrap_query(value, "test")
            expected_result = expected[idx]
            self.assertEqual(
                re.sub(r"\s", "", result),
                re.sub(r"\s", "", expected_result),
                (
                    f"Failed on test case {idx + 1}\n"
                    f"Input:\n{value}\n"
                    f"Expected:\n{expected_result}\n"
                    f"Got:\n{result}"
                ),
            )

    def test_wrap_query_union_except(self) -> None:
        """Test query wrapping with UNION and EXCEPT operators."""
        inputs = [
            # UNION case
            """
            MATCH (n:Person)
            RETURN n.name AS name, n.age AS age
            UNION
            MATCH (n:Employee)
            RETURN n.name AS name, n.salary AS salary
            """,
            """
            MATCH (a:Employee {name: "Alice"})
            RETURN a.name AS name
            UNION
            MATCH (b:Manager {name: "Bob"})
            RETURN b.name AS name
            """,
            # Complex UNION case
            """
            MATCH (n)-[r]->(m)
            RETURN n.name AS source, type(r) AS relationship, m.name AS target
            UNION
            MATCH (m)-[r]->(n)
            RETURN m.name AS source, type(r) AS relationship, n.name AS target
            """,
            """
            MATCH (a:Person)-[:FRIEND]->(b:Person)
            WHERE a.age > 30
            RETURN a.name AS name
            UNION
            MATCH (c:Person)-[:FRIEND]->(d:Person)
            WHERE c.age < 25
            RETURN c.name AS name
            """,
            # EXCEPT case
            """
            MATCH (n:Person)
            RETURN n.name AS name
            EXCEPT
            MATCH (n:Employee)
            RETURN n.name AS name
            """,
            """
            MATCH (a:Person)
            RETURN a.name AS name, a.age AS age
            EXCEPT
            MATCH (b:Person {name: "Alice", age: 30})
            RETURN b.name AS name, b.age AS age
            """,
        ]

        expected = [
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (keanu:Person {name:'Keanu Reeves'})
            RETURN keanu.name AS name, keanu.born AS born
            $$) AS (name agtype, born agtype);
            MATCH (n:Person)
            RETURN n.name AS name, n.age AS age
            UNION
            MATCH (n:Employee)
            RETURN n.name AS name, n.salary AS salary
            $$) AS (name agtype, age agtype, salary agtype);
            """,
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MERGE (n:a {id: 1})
            $$) AS (a agtype);
            """,
            # Expected output for the negative cases (no return clause)
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (n {description: "This will return a value"})
            MERGE (n)-[:RELATED]->(m)
            $$) AS (a agtype);
            MATCH (a:Employee {name: "Alice"})
            RETURN a.name AS name
            UNION
            MATCH (b:Manager {name: "Bob"})
            RETURN b.name AS name
            $$) AS (name agtype);
            """,
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (n {returnValue: "some value"})
            MERGE (n)-[:RELATED]->(m)
            $$) AS (a agtype);
            MATCH (n)-[r]->(m)
            RETURN n.name AS source, type(r) AS relationship, m.name AS target
            UNION
            MATCH (m)-[r]->(n)
            RETURN m.name AS source, type(r) AS relationship, n.name AS target
            $$) AS (source agtype, relationship agtype, target agtype);
            """,
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (a:Person)-[:FRIEND]->(b:Person)
            WHERE a.age > 30
            RETURN a.name AS name
            UNION
            MATCH (c:Person)-[:FRIEND]->(d:Person)
            WHERE c.age < 25
            RETURN c.name AS name
            $$) AS (name agtype);
            """,
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (n:Person)
            RETURN n.name AS name
            EXCEPT
            MATCH (n:Employee)
            RETURN n.name AS name
            $$) AS (name agtype);
            """,
            """
            SELECT * FROM ag_catalog.cypher('test', $$
            MATCH (a:Person)
            RETURN a.name AS name, a.age AS age
            EXCEPT
            MATCH (b:Person {name: "Alice", age: 30})
            RETURN b.name AS name, b.age AS age
            $$) AS (name agtype, age agtype);
            """,
        ]

        for idx, value in enumerate(inputs):
            result = AGEGraph._wrap_query(value, "test")
            expected_result = expected[idx]
            self.assertEqual(
                re.sub(r"\s", "", AGEGraph._wrap_query(value, "test")),
                re.sub(r"\s", "", expected[idx]),
                re.sub(r"\s", "", result),
                re.sub(r"\s", "", expected_result),
                (
                    f"Failed on test case {idx + 1}\n"
                    f"Input:\n{value}\n"
                    f"Expected:\n{expected_result}\n"
                    f"Got:\n{result}"
                ),
            )

        with self.assertRaises(ValueError):
            AGEGraph._wrap_query(
                """
    def test_wrap_query_errors(self) -> None:
        """Test error cases for query wrapping."""
        error_cases = [
            # Empty query
            "",
            # Return * case
            """
            MATCH ()
            RETURN *
            """,
                "test",
            )
            # Return * in UNION
            """
            MATCH (n:Person)
            RETURN n.name
            UNION
            MATCH ()
            RETURN *
            """,
        ]

        for query in error_cases:
            with self.assertRaises(ValueError):
                AGEGraph._wrap_query(query, "test")

    def test_format_properties(self) -> None:
        inputs: List[Dict[str, Any]] = [{}, {"a": "b"}, {"a": "b", "c": 1, "d": True}]

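The expected strings above encode the behavior under test: `_wrap_query` has to collect the aliases from every `RETURN` clause (across `UNION`/`EXCEPT` branches, deduplicated) to build the trailing `AS (col agtype, ...)` column list, and fall back to a single `a agtype` column when there is no `RETURN` at all. A rough illustration of that alias extraction (a hypothetical helper, not the actual `AGEGraph._wrap_query` implementation):

    import re

    def infer_agtype_columns(cypher: str) -> str:
        # Hypothetical sketch: collect "... AS alias" names from RETURN
        # clauses, preserving first-seen order, the way the expected
        # outputs in the tests above do.
        aliases: list[str] = []
        for match in re.finditer(r"\bAS\s+(\w+)", cypher):
            name = match.group(1)
            if name not in aliases:
                aliases.append(name)
        if not aliases:
            return "(a agtype)"  # no RETURN clause: single placeholder column
        return "(" + ", ".join(f"{name} agtype" for name in aliases) + ")"

    # e.g. the first UNION case above yields
    # "(name agtype, age agtype, salary agtype)"
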
libs/community/uv.lock (4153 lines, generated, new file) — file diff suppressed because it is too large
@@ -12,7 +12,7 @@ test tests:
    -u LANGCHAIN_API_KEY \
    -u LANGSMITH_TRACING \
    -u LANGCHAIN_PROJECT \
    poetry run pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)
    uv run --group test pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)

test_watch:
    env \
@@ -20,16 +20,16 @@ test_watch:
    -u LANGCHAIN_API_KEY \
    -u LANGSMITH_TRACING \
    -u LANGCHAIN_PROJECT \
    poetry run ptw --snapshot-update --now . --disable-socket --allow-unix-socket -- -vv $(TEST_FILE)
    uv run --group test ptw --snapshot-update --now . --disable-socket --allow-unix-socket -- -vv $(TEST_FILE)

test_profile:
    poetry run pytest -vv tests/unit_tests/ --profile-svg
    uv run --group test pytest -vv tests/unit_tests/ --profile-svg

check_imports: $(shell find langchain_core -name '*.py')
    poetry run python ./scripts/check_imports.py $^
    uv run --group test python ./scripts/check_imports.py $^

extended_tests:
    poetry run pytest --only-extended --disable-socket --allow-unix-socket $(TEST_FILE)
    uv run --group test pytest --only-extended --disable-socket --allow-unix-socket $(TEST_FILE)


######################
@@ -47,19 +47,19 @@ lint_tests: MYPY_CACHE=.mypy_cache_test

lint lint_diff lint_package lint_tests:
    ./scripts/lint_imports.sh
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff
    [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES) --diff
    [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && uv run --all-groups mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)

format format_diff:
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --fix $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check --fix $(PYTHON_FILES)

spell_check:
    poetry run codespell --toml pyproject.toml
    uv run --all-groups codespell --toml pyproject.toml

spell_fix:
    poetry run codespell --toml pyproject.toml -w
    uv run --all-groups codespell --toml pyproject.toml -w

######################
# HELP
@@ -1,16 +1,70 @@
[build-system]
requires = [ "poetry-core>=1.0.0",]
build-backend = "poetry.core.masonry.api"
requires = ["pdm-backend"]
build-backend = "pdm.backend"

[tool.poetry]
[project]
authors = []
license = {text = "MIT"}
requires-python = "<4.0,>=3.9"
dependencies = [
    "langsmith<0.4,>=0.1.125",
    "tenacity!=8.4.0,<10.0.0,>=8.1.0",
    "jsonpatch<2.0,>=1.33",
    "PyYAML>=5.3",
    "packaging<25,>=23.2",
    "typing-extensions>=4.7",
    "pydantic<3.0.0,>=2.5.2; python_full_version < \"3.12.4\"",
    "pydantic<3.0.0,>=2.7.4; python_full_version >= \"3.12.4\"",
]
name = "langchain-core"
version = "0.3.33"
description = "Building applications with LLMs through composability"
authors = []
license = "MIT"
readme = "README.md"

[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/core"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-core%3D%3D0%22&expanded=true"
repository = "https://github.com/langchain-ai/langchain"

[dependency-groups]
lint = [
    "ruff<1.0.0,>=0.9.2",
]
typing = [
    "mypy<1.11,>=1.10",
    "types-pyyaml<7.0.0.0,>=6.0.12.2",
    "types-requests<3.0.0.0,>=2.28.11.5",
    "types-jinja2<3.0.0,>=2.11.9",
    "langchain-text-splitters",
]
dev = [
    "jupyter<2.0.0,>=1.0.0",
    "setuptools<68.0.0,>=67.6.1",
    "grandalf<1.0,>=0.8",
]
test = [
    "pytest<9,>=8",
    "freezegun<2.0.0,>=1.2.2",
    "pytest-mock<4.0.0,>=3.10.0",
    "syrupy<5.0.0,>=4.0.2",
    "pytest-watcher<1.0.0,>=0.3.4",
    "pytest-asyncio<1.0.0,>=0.21.1",
    "grandalf<1.0,>=0.8",
    "responses<1.0.0,>=0.25.0",
    "pytest-socket<1.0.0,>=0.7.0",
    "pytest-xdist<4.0.0,>=3.6.1",
    "blockbuster~=1.5.11",
    "numpy<2.0.0,>=1.24.0; python_version < \"3.12\"",
    "numpy<3,>=1.26.0; python_version >= \"3.12\"",
    "langchain-tests",
]
test_integration = []

[tool.uv.sources]
langchain-tests = { path = "../standard-tests" }
langchain-text-splitters = { path = "../text-splitters" }

[tool.mypy]
exclude = [ "notebooks", "examples", "example_data", "langchain_core/pydantic", "tests/unit_tests/utils/test_function_calling.py",]
disallow_untyped_defs = "True"
@@ -21,27 +75,6 @@ ignore_missing_imports = true
[tool.ruff]
target-version = "py39"

[tool.poetry.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/core"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-core%3D%3D0%22&expanded=true"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langsmith = ">=0.1.125,<0.4"
tenacity = ">=8.1.0,!=8.4.0,<10.0.0"
jsonpatch = "^1.33"
PyYAML = ">=5.3"
packaging = ">=23.2,<25"
typing-extensions = ">=4.7"
[[tool.poetry.dependencies.pydantic]]
version = "^2.5.2"
python = "<3.12.4"

[[tool.poetry.dependencies.pydantic]]
version = "^2.7.4"
python = ">=3.12.4"

[tool.poetry.extras]

[tool.ruff.lint]
select = [ "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TID", "TRY", "UP", "W", "YTT",]
@@ -56,21 +89,6 @@ markers = [ "requires: mark tests as requiring a specific library", "compile: ma
asyncio_mode = "auto"
filterwarnings = [ "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning",]

[tool.poetry.group.lint]
optional = true

[tool.poetry.group.typing]
optional = true

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.test]
optional = true

[tool.poetry.group.test_integration]
optional = true

[tool.ruff.lint.pep8-naming]
classmethod-decorators = [ "classmethod", "langchain_core.utils.pydantic.pre_init", "pydantic.field_validator", "pydantic.v1.root_validator",]

@@ -80,71 +98,3 @@ classmethod-decorators = [ "classmethod", "langchain_core.utils.pydantic.pre_ini
"tests/unit_tests/runnables/test_graph.py" = [ "E501",]
"tests/**" = [ "S",]
"scripts/**" = [ "S",]

[tool.poetry.group.lint.dependencies]
ruff = "^0.9.2"

[tool.poetry.group.typing.dependencies]
mypy = ">=1.10,<1.11"
types-pyyaml = "^6.0.12.2"
types-requests = "^2.28.11.5"
types-jinja2 = "^2.11.9"

[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
setuptools = "^67.6.1"
grandalf = "^0.8"

[tool.poetry.group.test.dependencies]
pytest = "^8"
freezegun = "^1.2.2"
pytest-mock = "^3.10.0"
syrupy = "^4.0.2"
pytest-watcher = "^0.3.4"
pytest-asyncio = "^0.21.1"
grandalf = "^0.8"
responses = "^0.25.0"
pytest-socket = "^0.7.0"
pytest-xdist = "^3.6.1"
blockbuster = "~1.5.11"
[[tool.poetry.group.test.dependencies.numpy]]
version = "^1.24.0"
python = "<3.12"

[[tool.poetry.group.test.dependencies.numpy]]
version = ">=1.26.0,<3"
python = ">=3.12"

[tool.poetry.group.test_integration.dependencies]

[tool.poetry.group.typing.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true

[tool.poetry.group.test.dependencies.langchain-tests]
path = "../standard-tests"
develop = true
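Where poetry expressed per-interpreter pins as separate `[[tool.poetry.dependencies.pydantic]]` and `[[...numpy]]` tables, the PEP 621 lists above fold the same constraints into PEP 508 environment markers. A standalone illustration (not part of the diff) using `packaging`, which the repo's tooling already depends on:

    from packaging.requirements import Requirement

    req = Requirement('pydantic<3.0.0,>=2.5.2; python_full_version < "3.12.4"')
    print(req.name, req.specifier)  # -> pydantic <3.0.0,>=2.5.2
    # evaluate() merges the given keys over the current interpreter's
    # environment, so other interpreters can be tested without switching.
    print(req.marker.evaluate({"python_full_version": "3.11.9"}))  # True
    print(req.marker.evaluate({"python_full_version": "3.12.5"}))  # False
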
libs/core/uv.lock (2707 lines, generated, new file) — file diff suppressed because it is too large
@@ -12,33 +12,33 @@ TEST_FILE ?= tests/unit_tests/

# Run unit tests and generate a coverage report.
coverage:
    poetry run pytest --cov \
    uv run --group test pytest --cov \
        --cov-config=.coveragerc \
        --cov-report xml \
        --cov-report term-missing:skip-covered \
        $(TEST_FILE)

test tests:
    poetry run pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)
    uv run --group test pytest -n auto --disable-socket --allow-unix-socket $(TEST_FILE)

extended_tests:
    poetry run pytest --disable-socket --allow-unix-socket --only-extended tests/unit_tests
    uv run --group test pytest --disable-socket --allow-unix-socket --only-extended tests/unit_tests

test_watch:
    poetry run ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --disable-warnings tests/unit_tests
    uv run --group test ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --disable-warnings tests/unit_tests

test_watch_extended:
    poetry run ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --only-extended tests/unit_tests
    uv run --group test ptw --snapshot-update --now . -- -x --disable-socket --allow-unix-socket --only-extended tests/unit_tests

integration_tests:
    poetry run pytest tests/integration_tests
    uv run --group test_integration pytest tests/integration_tests

docker_tests:
    docker build -t my-langchain-image:test .
    docker run --rm my-langchain-image:test

check_imports: $(shell find langchain -name '*.py')
    poetry run python ./scripts/check_imports.py $^
    uv run python ./scripts/check_imports.py $^

######################
# LINTING AND FORMATTING
@@ -55,19 +55,19 @@ lint_tests: MYPY_CACHE=.mypy_cache_test

lint lint_diff lint_package lint_tests:
    ./scripts/lint_imports.sh
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff
    [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES) --diff
    [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && uv run --all-groups mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)

format format_diff:
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --select I --fix $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff format $(PYTHON_FILES)
    [ "$(PYTHON_FILES)" = "" ] || uv run --all-groups ruff check --select I --fix $(PYTHON_FILES)

spell_check:
    poetry run codespell --toml pyproject.toml
    uv run --all-groups codespell --toml pyproject.toml

spell_fix:
    poetry run codespell --toml pyproject.toml -w
    uv run --all-groups codespell --toml pyproject.toml -w

######################
# HELP
@@ -28,7 +28,7 @@ from typing_extensions import Self

if TYPE_CHECKING:
    import openai
    from openai.types.beta.threads import ThreadMessage
    from openai.types.beta.threads import ThreadMessage  # type: ignore[attr-defined]
    from openai.types.beta.threads.required_action_function_tool_call import (
        RequiredActionFunctionToolCall,
    )
@@ -590,7 +590,8 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
                isinstance(content, openai.types.beta.threads.TextContentBlock)
                if version_gte_1_14
                else isinstance(
                    content, openai.types.beta.threads.MessageContentText
                    content,
                    openai.types.beta.threads.MessageContentText,  # type: ignore[attr-defined]
                )
            )
            for content in answer
@@ -743,7 +744,8 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
                isinstance(content, openai.types.beta.threads.TextContentBlock)
                if version_gte_1_14
                else isinstance(
                    content, openai.types.beta.threads.MessageContentText
                    content,
                    openai.types.beta.threads.MessageContentText,  # type: ignore[attr-defined]
                )
            )
            for content in answer
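The `version_gte_1_14` flag gating these `isinstance` checks is computed outside this hunk; as a hedged sketch, it is the usual pattern of comparing the installed SDK version against the release that renamed `MessageContentText` to `TextContentBlock`:

    import openai
    from packaging.version import Version

    # Sketch only: how such a gate is typically computed. The cutoff version
    # is an assumption inferred from the flag's name, not a quote of the
    # module's actual code.
    version_gte_1_14 = Version(openai.__version__) >= Version("1.14.0")
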
@@ -236,7 +236,9 @@ class FlareChain(Chain):
                "Please install langchain-openai."
                "pip install langchain-openai"
            )
        llm = ChatOpenAI(max_tokens=max_generation_len, logprobs=True, temperature=0)
        llm = ChatOpenAI(
            max_completion_tokens=max_generation_len, logprobs=True, temperature=0
        )
        response_chain = PROMPT | llm
        question_gen_chain = QUESTION_GENERATOR_PROMPT | llm | StrOutputParser()
        return cls(
@@ -65,7 +65,7 @@ class OpenAIModerationChain(Chain):
        except ValueError:
            values["openai_pre_1_0"] = True
        if values["openai_pre_1_0"]:
            values["client"] = openai.Moderation
            values["client"] = openai.Moderation  # type: ignore[attr-defined]
        else:
            values["client"] = openai.OpenAI(api_key=openai_api_key)
            values["async_client"] = openai.AsyncOpenAI(api_key=openai_api_key)
@@ -30,7 +30,9 @@ def _embedding_factory() -> Embeddings:
        from langchain_openai import OpenAIEmbeddings
    except ImportError:
        try:
            from langchain_community.embeddings.openai import OpenAIEmbeddings
            from langchain_community.embeddings.openai import (  # type: ignore[no-redef]
                OpenAIEmbeddings,
            )
        except ImportError:
            raise ImportError(
                "Could not import OpenAIEmbeddings. Please install the "
@@ -89,7 +91,9 @@ class _EmbeddingDistanceChainMixin(Chain):
        pass

    try:
        from langchain_community.embeddings.openai import OpenAIEmbeddings
        from langchain_community.embeddings.openai import (  # type: ignore[no-redef]
            OpenAIEmbeddings,
        )

        types_.append(OpenAIEmbeddings)
    except ImportError:
@@ -135,7 +135,9 @@ def load_evaluator(
        from langchain_openai import ChatOpenAI
    except ImportError:
        try:
            from langchain_community.chat_models.openai import ChatOpenAI
            from langchain_community.chat_models.openai import (  # type: ignore[no-redef]
                ChatOpenAI,
            )
        except ImportError:
            raise ImportError(
                "Could not import langchain_openai or fallback onto "
@@ -1,16 +1,119 @@
[build-system]
requires = [ "poetry-core>=1.0.0",]
build-backend = "poetry.core.masonry.api"
requires = ["pdm-backend"]
build-backend = "pdm.backend"

[tool.poetry]
name = "langchain"
version = "0.3.17"
description = "Building applications with LLMs through composability"
[project]
authors = []
license = "MIT"
license = {text = "MIT"}
requires-python = "<4.0,>=3.9"
dependencies = [
    "langchain-core<1.0.0,>=0.3.33",
    "langchain-text-splitters<1.0.0,>=0.3.3",
    "langsmith<0.4,>=0.1.17",
    "pydantic<3.0.0,>=2.7.4",
    "SQLAlchemy<3,>=1.4",
    "requests<3,>=2",
    "PyYAML>=5.3",
    "aiohttp<4.0.0,>=3.8.3",
    "tenacity!=8.4.0,<10,>=8.1.0",
    "numpy<2,>=1.26.4; python_version < \"3.12\"",
    "numpy<3,>=1.26.2; python_version >= \"3.12\"",
    "async-timeout<5.0.0,>=4.0.0; python_version < \"3.11\"",
]
name = "langchain"
version = "0.3.18rc1"
description = "Building applications with LLMs through composability"
readme = "README.md"
[project.optional-dependencies]
community = ["langchain-community"]
anthropic = ["langchain-anthropic"]
openai = ["langchain-openai"]
cohere = ["langchain-cohere"]
google-vertexai = ["langchain-google-vertexai"]
google-genai = ["langchain-google-genai"]
fireworks = ["langchain-fireworks"]
ollama = ["langchain-ollama"]
together = ["langchain-together"]
mistralai = ["langchain-mistralai"]
huggingface = ["langchain-huggingface"]
groq = ["langchain-groq"]
aws = ["langchain-aws"]
deepseek = ["langchain-deepseek"]
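Under PEP 621 these extras behave like poetry's old extras table: `pip install "langchain[openai]"` pulls in `langchain-openai`. A small sketch (assuming an environment where `langchain` is installed) of how the extras surface in the installed metadata:

    from importlib.metadata import requires

    # Each optional dependency carries an `extra == "..."` marker in the
    # package's metadata, so the extras table above can be recovered from it.
    reqs = requires("langchain") or []
    print([r for r in reqs if 'extra == "openai"' in r])
    # e.g. ['langchain-openai; extra == "openai"']
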
[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain%3D%3D0%22&expanded=true"
repository = "https://github.com/langchain-ai/langchain"

[project.scripts]
langchain-server = "langchain.server:main"

[dependency-groups]
test = [
    "pytest<9,>=8",
    "pytest-cov<5.0.0,>=4.0.0",
    "pytest-dotenv<1.0.0,>=0.5.2",
    "duckdb-engine<1.0.0,>=0.9.2",
    "pytest-watcher<1.0.0,>=0.2.6",
    "freezegun<2.0.0,>=1.2.2",
    "responses<1.0.0,>=0.22.0",
    "pytest-asyncio<1.0.0,>=0.23.2",
    "lark<2.0.0,>=1.1.5",
    "pandas<3.0.0,>=2.0.0",
    "pytest-mock<4.0.0,>=3.10.0",
    "pytest-socket<1.0.0,>=0.6.0",
    "syrupy<5.0.0,>=4.0.2",
    "requests-mock<2.0.0,>=1.11.0",
    "pytest-xdist<4.0.0,>=3.6.1",
    "cffi<1.17.1; python_version < \"3.10\"",
    "cffi; python_version >= \"3.10\"",
    "langchain-tests @ file:///${PROJECT_ROOT}/../standard-tests",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
    "langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
    "langchain-openai @ file:///${PROJECT_ROOT}/../partners/openai",
    "toml>=0.10.2",
    "packaging>=24.2",
]
codespell = [
    "codespell<3.0.0,>=2.2.0",
]
test_integration = [
    "pytest-vcr<2.0.0,>=1.0.2",
    "urllib3<2; python_version < \"3.10\"",
    "wrapt<2.0.0,>=1.15.0",
    "python-dotenv<2.0.0,>=1.0.0",
    "cassio<1.0.0,>=0.1.0",
    "langchainhub<1.0.0,>=0.1.16",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
    "langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
]
lint = [
    "ruff<1.0.0,>=0.9.2",
    "cffi<1.17.1; python_version < \"3.10\"",
    "cffi; python_version >= \"3.10\"",
]
typing = [
    "mypy<2.0,>=1.10",
    "types-pyyaml<7.0.0.0,>=6.0.12.2",
    "types-requests<3.0.0.0,>=2.28.11.5",
    "types-toml<1.0.0.0,>=0.10.8.1",
    "types-redis<5.0.0.0,>=4.3.21.6",
    "types-pytz<2024.0.0.0,>=2023.3.0.0",
    "types-chardet<6.0.0.0,>=5.0.4.6",
    "mypy-protobuf<4.0.0,>=3.0.0",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
    "langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
]
dev = [
    "jupyter<2.0.0,>=1.0.0",
    "playwright<2.0.0,>=1.28.0",
    "setuptools<68.0.0,>=67.6.1",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
    "langchain-text-splitters @ file:///${PROJECT_ROOT}/../text-splitters",
]

[tool.ruff]
target-version = "py39"
exclude = [ "tests/integration_tests/examples/non-utf8-encoding.py",]
@@ -25,32 +128,6 @@ skip = ".git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package
ignore-regex = ".*(Stati Uniti|Tense=Pres).*"
ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure,damon,crate,aadd,symbl,precesses,accademia,nin"

[tool.poetry.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain%3D%3D0%22&expanded=true"

[tool.poetry.scripts]
langchain-server = "langchain.server:main"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.33"
langchain-text-splitters = "^0.3.3"
langsmith = ">=0.1.17,<0.4"
pydantic = "^2.7.4"
SQLAlchemy = ">=1.4,<3"
requests = "^2"
PyYAML = ">=5.3"
aiohttp = "^3.8.3"
tenacity = ">=8.1.0,!=8.4.0,<10"
[[tool.poetry.dependencies.numpy]]
version = ">=1.22.4,<2"
python = "<3.12"

[[tool.poetry.dependencies.numpy]]
version = ">=1.26.2,<3"
python = ">=3.12"

[tool.ruff.lint]
select = [ "E", "F", "I", "T201", "D",]
pydocstyle = { convention = "google" }
@@ -67,125 +144,3 @@ addopts = "--strict-markers --strict-config --durations=5 --snapshot-warn-unused
markers = [ "requires: mark tests as requiring a specific library", "scheduled: mark tests to run in scheduled testing", "compile: mark placeholder test used to compile integration tests without running them",]
asyncio_mode = "auto"
filterwarnings = [ "ignore::langchain_core._api.beta_decorator.LangChainBetaWarning", "ignore::langchain_core._api.deprecation.LangChainDeprecationWarning:tests", "ignore::langchain_core._api.deprecation.LangChainPendingDeprecationWarning:tests",]

[tool.poetry.dependencies.async-timeout]
version = "^4.0.0"
python = "<3.11"

[tool.poetry.group.test]
optional = true

[tool.poetry.group.codespell]
optional = true

[tool.poetry.group.test_integration]
optional = true

[tool.poetry.group.lint]
optional = true

[tool.poetry.group.typing]
optional = true

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.test.dependencies]
pytest = "^8"
pytest-cov = "^4.0.0"
pytest-dotenv = "^0.5.2"
duckdb-engine = "^0.9.2"
pytest-watcher = "^0.2.6"
freezegun = "^1.2.2"
responses = "^0.22.0"
pytest-asyncio = "^0.23.2"
lark = "^1.1.5"
pandas = "^2.0.0"
pytest-mock = "^3.10.0"
pytest-socket = "^0.6.0"
syrupy = "^4.0.2"
requests-mock = "^1.11.0"
pytest-xdist = "^3.6.1"
[[tool.poetry.group.test.dependencies.cffi]]
version = "<1.17.1"
python = "<3.10"

[[tool.poetry.group.test.dependencies.cffi]]
version = "*"
python = ">=3.10"

[tool.poetry.group.codespell.dependencies]
codespell = "^2.2.0"

[tool.poetry.group.test_integration.dependencies]
pytest-vcr = "^1.0.2"
wrapt = "^1.15.0"
python-dotenv = "^1.0.0"
cassio = "^0.1.0"
langchainhub = "^0.1.16"

[tool.poetry.group.lint.dependencies]
ruff = "^0.9.2"
[[tool.poetry.group.lint.dependencies.cffi]]
version = "<1.17.1"
python = "<3.10"

[[tool.poetry.group.lint.dependencies.cffi]]
version = "*"
python = ">=3.10"

[tool.poetry.group.typing.dependencies]
mypy = "^1.10"
types-pyyaml = "^6.0.12.2"
types-requests = "^2.28.11.5"
types-toml = "^0.10.8.1"
types-redis = "^4.3.21.6"
types-pytz = "^2023.3.0.0"
types-chardet = "^5.0.4.6"
mypy-protobuf = "^3.0.0"

[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
playwright = "^1.28.0"
setuptools = "^67.6.1"

[tool.poetry.group.test.dependencies.langchain-tests]
path = "../standard-tests"
develop = true

[tool.poetry.group.test.dependencies.langchain-core]
path = "../core"
develop = true

[tool.poetry.group.test.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true

[tool.poetry.group.test.dependencies.langchain-openai]
path = "../partners/openai"
optional = true
develop = true

[tool.poetry.group.test_integration.dependencies.langchain-core]
path = "../core"
develop = true

[tool.poetry.group.test_integration.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true

[tool.poetry.group.typing.dependencies.langchain-core]
path = "../core"
develop = true

[tool.poetry.group.typing.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true

[tool.poetry.group.dev.dependencies.langchain-core]
path = "../core"
develop = true

[tool.poetry.group.dev.dependencies.langchain-text-splitters]
path = "../text-splitters"
develop = true
@@ -5,6 +5,7 @@ from typing import Any, Dict, Mapping

import pytest
import toml
from packaging.requirements import Requirement

HERE = Path(__file__).parent

@@ -12,30 +13,21 @@ PYPROJECT_TOML = HERE / "../../pyproject.toml"


@pytest.fixture()
def poetry_conf() -> Dict[str, Any]:
def uv_conf() -> Dict[str, Any]:
    """Load the pyproject.toml file."""
    with open(PYPROJECT_TOML) as f:
        return toml.load(f)["tool"]["poetry"]
        return toml.load(f)


def test_required_dependencies(poetry_conf: Mapping[str, Any]) -> None:
def test_required_dependencies(uv_conf: Mapping[str, Any]) -> None:
    """A test that checks if a new non-optional dependency is being introduced.

    If this test is triggered, it means that a contributor is trying to introduce a new
    required dependency. This should be avoided in most situations.
    """
    # Get the dependencies from the [tool.poetry.dependencies] section
    dependencies = poetry_conf["dependencies"]

    is_required = {
        package_name: isinstance(requirements, str)
        or isinstance(requirements, list)
        or not requirements.get("optional", False)
        for package_name, requirements in dependencies.items()
    }
    required_dependencies = [
        package_name for package_name, required in is_required.items() if required
    ]
    dependencies = uv_conf["project"]["dependencies"]
    required_dependencies = set(Requirement(dep).name for dep in dependencies)

    assert sorted(required_dependencies) == sorted(
        [
@@ -48,22 +40,13 @@ def test_required_dependencies(poetry_conf: Mapping[str, Any]) -> None:
            "langsmith",
            "numpy",
            "pydantic",
            "python",
            "requests",
            "tenacity",
        ]
    )

    unrequired_dependencies = [
        package_name for package_name, required in is_required.items() if not required
    ]
    in_extras = [
        dep for group in poetry_conf.get("extras", {}).values() for dep in group
    ]
    assert set(unrequired_dependencies) == set(in_extras)

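The refactor leans on `packaging.requirements.Requirement` to normalize PEP 508 strings back to bare package names, which is what makes the flat `project.dependencies` list comparable to the old poetry mapping. For instance (a standalone illustration, not part of the diff):

    from packaging.requirements import Requirement

    # Version specifiers and environment markers are stripped away by .name,
    # so both numpy pins in the dependency list collapse to a single entry.
    deps = [
        'numpy<2,>=1.26.4; python_version < "3.12"',
        'numpy<3,>=1.26.2; python_version >= "3.12"',
        "langchain-core<1.0.0,>=0.3.33",
    ]
    print({Requirement(d).name for d in deps})
    # -> {'langchain-core', 'numpy'} (set order may vary)
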
def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
def test_test_group_dependencies(uv_conf: Mapping[str, Any]) -> None:
    """Check if someone is attempting to add additional test dependencies.

    Only dependencies associated with test running infrastructure should be added
@@ -72,9 +55,10 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
    Examples of dependencies that should NOT be included: boto3, azure, postgres, etc.
    """

    test_group_deps = sorted(poetry_conf["group"]["test"]["dependencies"])
    dependencies = uv_conf["dependency-groups"]["test"]
    test_group_deps = set(Requirement(dep).name for dep in dependencies)

    assert test_group_deps == sorted(
    assert sorted(test_group_deps) == sorted(
        [
            "duckdb-engine",
            "freezegun",
@@ -83,6 +67,7 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
            "langchain-text-splitters",
            "langchain-openai",
            "lark",
            "packaging",
            "pandas",
            "pytest",
            "pytest-asyncio",
@@ -94,6 +79,7 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
            "pytest-xdist",
            "responses",
            "syrupy",
            "toml",
            "requests-mock",
            # TODO: temporary hack since cffi 1.17.1 doesn't work with py 3.9.
            "cffi",

libs/langchain/uv.lock (6112 lines, generated, new file) — file diff suppressed because it is too large
@@ -375,3 +375,8 @@ packages:
  path: .
  repo: Amitgb14/langchain_jenkins
  downloads: 0
- name: langchain-goodfire
  path: .
  repo: keenanpepper/langchain-goodfire
  downloads: 51
  downloads_updated_at: '2025-01-30T00:00:00+00:00'

libs/partners/deepseek/poetry.lock (generated, 12 changed lines)
@@ -470,13 +470,13 @@ files = [

[[package]]
name = "langchain-core"
version = "0.3.31"
version = "0.3.33"
description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.9"
files = [
    {file = "langchain_core-0.3.31-py3-none-any.whl", hash = "sha256:882e64ad95887c951dce8e835889e43263b11848c394af3b73e06912624bd743"},
    {file = "langchain_core-0.3.31.tar.gz", hash = "sha256:5ffa56354c07de9efaa4139609659c63e7d9b29da2c825f6bab9392ec98300df"},
    {file = "langchain_core-0.3.33-py3-none-any.whl", hash = "sha256:269706408a2223f863ff1f9616f31903a5712403199d828b50aadbc4c28b553a"},
    {file = "langchain_core-0.3.33.tar.gz", hash = "sha256:b5dd93a4e7f8198d2fc6048723b0bfecf7aaf128b0d268cbac19c34c1579b953"},
]

[package.dependencies]
@@ -493,7 +493,7 @@ typing-extensions = ">=4.7"

[[package]]
name = "langchain-openai"
version = "0.3.2"
version = "0.3.3"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = ">=3.9,<4.0"
@@ -501,7 +501,7 @@ files = []
develop = false

[package.dependencies]
langchain-core = "^0.3.31"
langchain-core = "^0.3.33"
openai = "^1.58.1"
tiktoken = ">=0.7,<1"

@@ -1663,4 +1663,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<4.0"
content-hash = "9c889754b3cb5e1044c6f7eeab3a5e4857179762c18a6d378fbb1ba72ab5b08b"
content-hash = "b6af7dbc4b4ab30ab64966c69662c811556bff8c3a25ff20ede1413714ebb271"

@@ -22,7 +22,7 @@ disallow_untyped_defs = "True"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.15"
langchain-openai = "^0.3.2"
langchain-openai = "^0.3.3"

[tool.ruff.lint]
select = ["E", "F", "I", "T201"]

@@ -3,6 +3,8 @@
from typing import Type

import pytest
from langchain_core.language_models import BaseChatModel
from langchain_core.tools import BaseTool
from langchain_tests.integration_tests import ChatModelIntegrationTests

from langchain_deepseek.chat_models import ChatDeepSeek
@@ -21,8 +23,14 @@ class TestChatDeepSeek(ChatModelIntegrationTests):
        "temperature": 0,
    }

    @pytest.mark.xfail(reason="Not yet supported.")
    def test_tool_message_histories_list_content(
        self, model: BaseChatModel, my_adder_tool: BaseTool
    ) -> None:
        super().test_tool_message_histories_list_content(model, my_adder_tool)


@pytest.mark.xfail(reason="Reasoning API is down")

@pytest.mark.xfail(reason="Takes > 30s to run.")
def test_reasoning_content() -> None:
    """Test reasoning content."""
    chat_model = ChatDeepSeek(model="deepseek-reasoner")

@@ -19,7 +19,7 @@ class TestOpenAIStandard(ChatModelIntegrationTests):

    @property
    def chat_model_params(self) -> dict:
        return {"model": "o3-mini", "stream_usage": True}
        return {"model": "gpt-4o-mini", "stream_usage": True}

    @property
    def supports_image_inputs(self) -> bool:

@@ -1,15 +1,6 @@
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-text-splitters"
version = "0.3.5"
description = "LangChain text splitting utilities"
authors = []
license = "MIT"
readme = "README.md"
repository = "https://github.com/langchain-ai/langchain"
requires = ["pdm-backend"]
build-backend = "pdm.backend"

[tool.mypy]
disallow_untyped_defs = "True"
@@ -30,14 +21,6 @@ module = [
]
ignore_missing_imports = "True"

[tool.poetry.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/text-splitters"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-text-splitters%3D%3D0%22&expanded=true"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.29"

[tool.ruff]
target-version = "py39"

@@ -56,84 +39,92 @@ markers = [
]
asyncio_mode = "auto"

[tool.poetry.group.lint]
optional = true

[tool.poetry.group.typing]
optional = true

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.test]
optional = true

[tool.ruff.lint.pydocstyle]
convention = "google"

[tool.ruff.lint.per-file-ignores]
"tests/**" = ["D"]

[tool.poetry.group.lint.dependencies]
ruff = "^0.9.2"
[tool.pdm.dev-dependencies]
lint = [
    "ruff<1.0.0,>=0.9.2",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
]
typing = [
    "mypy<2.0,>=1.10",
    "lxml-stubs<1.0.0,>=0.5.1",
    "types-requests<3.0.0.0,>=2.31.0.20240218",
    "tiktoken<1.0.0,>=0.8.0",
]
dev = [
    "jupyter<2.0.0,>=1.0.0",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test = [
    "pytest<9,>=8",
    "freezegun<2.0.0,>=1.2.2",
    "pytest-mock<4.0.0,>=3.10.0",
    "pytest-watcher<1.0.0,>=0.3.4",
    "pytest-asyncio<1.0.0,>=0.21.1",
    "pytest-socket<1.0.0,>=0.7.0",
    "pytest-xdist<4.0.0,>=3.6.1",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test_integration = [
    "spacy; python_version < \"3.13\"",
    "nltk<4.0.0,>=3.9.1",
    "transformers<5.0.0,>=4.47.0",
    "sentence-transformers>=2.6.0; python_version < \"3.13\"",
]

[tool.pdm.build]
includes = []

[project]
authors = []
license = {text = "MIT"}
requires-python = "<4.0,>=3.9"
dependencies = [
    "langchain-core<1.0.0,>=0.3.29",
]
name = "langchain-text-splitters"
version = "0.3.5"
description = "LangChain text splitting utilities"
readme = "README.md"

[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/text-splitters"
"Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-text-splitters%3D%3D0%22&expanded=true"
repository = "https://github.com/langchain-ai/langchain"

[tool.poetry.group.typing.dependencies]
mypy = "^1.10"
lxml-stubs = "^0.5.1"
types-requests = "^2.31.0.20240218"
tiktoken = "^0.8.0"

[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"

[tool.poetry.group.test.dependencies]
pytest = "^8"
freezegun = "^1.2.2"
pytest-mock = "^3.10.0"
pytest-watcher = "^0.3.4"
pytest-asyncio = "^0.21.1"
pytest-socket = "^0.7.0"
pytest-xdist = "^3.6.1"

[tool.poetry.group.test_integration]
optional = true

[tool.poetry.group.test_integration.dependencies]
spacy = { version = "*", python = "<3.13" }
nltk = "^3.9.1"
transformers = "^4.47.0"
sentence-transformers = { version = ">=2.6.0", python = "<3.13" }

[tool.poetry.group.lint.dependencies.langchain-core]
path = "../core"
develop = true

[tool.poetry.group.dev.dependencies.langchain-core]
path = "../core"
develop = true

[tool.poetry.group.test.dependencies.langchain-core]
path = "../core"
develop = true
[dependency-groups]
lint = [
    "ruff<1.0.0,>=0.9.2",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
]
typing = [
    "mypy<2.0,>=1.10",
    "lxml-stubs<1.0.0,>=0.5.1",
    "types-requests<3.0.0.0,>=2.31.0.20240218",
    "tiktoken<1.0.0,>=0.8.0",
]
dev = [
    "jupyter<2.0.0,>=1.0.0",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test = [
    "pytest<9,>=8",
    "freezegun<2.0.0,>=1.2.2",
    "pytest-mock<4.0.0,>=3.10.0",
    "pytest-watcher<1.0.0,>=0.3.4",
    "pytest-asyncio<1.0.0,>=0.21.1",
    "pytest-socket<1.0.0,>=0.7.0",
    "pytest-xdist<4.0.0,>=3.6.1",
    "langchain-core @ file:///${PROJECT_ROOT}/../core",
]
test_integration = [
    "spacy; python_version < \"3.13\"",
    "nltk<4.0.0,>=3.9.1",
    "transformers<5.0.0,>=4.47.0",
    "sentence-transformers>=2.6.0; python_version < \"3.13\"",
]

libs/text-splitters/uv.lock (3975 lines, generated, new file) — file diff suppressed because it is too large