mirror of
https://github.com/hwchase17/langchain.git
synced 2026-04-19 03:44:40 +00:00
Compare commits
204 Commits
cc/extende
...
eugene/pyd
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e4727bf17d | ||
|
|
f16da0c9d3 | ||
|
|
1bf9a04f26 | ||
|
|
05bd2b4618 | ||
|
|
608c4a4327 | ||
|
|
042ef83bbf | ||
|
|
409196add2 | ||
|
|
ed889192d4 | ||
|
|
89c2659f33 | ||
|
|
df6cddafba | ||
|
|
e76e6046fc | ||
|
|
d2a69a7b6b | ||
|
|
85f673c7ea | ||
|
|
41a632a2ae | ||
|
|
35aacff6c6 | ||
|
|
40df0249fb | ||
|
|
f298cd3446 | ||
|
|
e19b6ad266 | ||
|
|
8defdb10d8 | ||
|
|
458d6f76ad | ||
|
|
bde3dbaed2 | ||
|
|
6b24eeb884 | ||
|
|
d82eec6aad | ||
|
|
da037f1c55 | ||
|
|
21a43ee3dc | ||
|
|
28594567de | ||
|
|
cacd68b2a7 | ||
|
|
1ee432e904 | ||
|
|
0bfbd4f7bd | ||
|
|
7ededced7d | ||
|
|
284e1a7e9e | ||
|
|
d67e1dfe32 | ||
|
|
df4fc60312 | ||
|
|
c074922876 | ||
|
|
bbb7c267e5 | ||
|
|
20f880ac84 | ||
|
|
9f0968f9d2 | ||
|
|
8b7f5e1ec0 | ||
|
|
b9575d586b | ||
|
|
934bc5b030 | ||
|
|
da48c98eaf | ||
|
|
844955d6e1 | ||
|
|
c417bbc313 | ||
|
|
edcd348ce7 | ||
|
|
d3499cc90b | ||
|
|
2b15183980 | ||
|
|
157d32b294 | ||
|
|
ec49d0d0b4 | ||
|
|
19bce9aba1 | ||
|
|
a56812f970 | ||
|
|
89c6cb6f8b | ||
|
|
aa9f247803 | ||
|
|
fce9322d2e | ||
|
|
e2c8690e7f | ||
|
|
287911adbc | ||
|
|
fc08f240ee | ||
|
|
bee8994b7e | ||
|
|
c62772885a | ||
|
|
374bb40852 | ||
|
|
c43c62b6c9 | ||
|
|
bb83f1b875 | ||
|
|
ed412d89b4 | ||
|
|
789f4b1c9c | ||
|
|
13ba15b2cc | ||
|
|
b904763115 | ||
|
|
cecdc119bf | ||
|
|
7975c1f0ca | ||
|
|
622cb7d2cf | ||
|
|
3a0c7c705c | ||
|
|
162d3ff54b | ||
|
|
301be2d40a | ||
|
|
6df9178056 | ||
|
|
6208773c77 | ||
|
|
e24259fee7 | ||
|
|
9132516c84 | ||
|
|
f2f9187919 | ||
|
|
438301db90 | ||
|
|
7842e2c460 | ||
|
|
949127fbc1 | ||
|
|
baad44965e | ||
|
|
d588ce1f29 | ||
|
|
2070d659a0 | ||
|
|
6c8d626d70 | ||
|
|
109ba548bd | ||
|
|
0f4a087186 | ||
|
|
71268f7a15 | ||
|
|
b8fc82b84b | ||
|
|
179eeead81 | ||
|
|
7a57b4fbbf | ||
|
|
d9ba65ca26 | ||
|
|
0319ccd273 | ||
|
|
6e2a72c218 | ||
|
|
9f482f4284 | ||
|
|
15466d89a2 | ||
|
|
61087b0c0d | ||
|
|
b2ba4f4072 | ||
|
|
b2c8f2de4c | ||
|
|
6df9360e32 | ||
|
|
b664b3364c | ||
|
|
bccc546a25 | ||
|
|
6405e7fa07 | ||
|
|
ae24f7364d | ||
|
|
81f8c2f33d | ||
|
|
c27703a10f | ||
|
|
1b77063c88 | ||
|
|
b74546a458 | ||
|
|
8a3a9c8968 | ||
|
|
776d01db49 | ||
|
|
40b43b0bfb | ||
|
|
6fd4ac4283 | ||
|
|
f4e7cb394f | ||
|
|
1ecaffab8a | ||
|
|
5bbd5364f1 | ||
|
|
e02b093d81 | ||
|
|
0cc6584889 | ||
|
|
6e1b0d0228 | ||
|
|
a111098230 | ||
|
|
9e7222618b | ||
|
|
8516a03a02 | ||
|
|
1ad66e70dc | ||
|
|
76564edd3a | ||
|
|
1c51e1693d | ||
|
|
a267da6a3a | ||
|
|
8da2ace99d | ||
|
|
e358846b39 | ||
|
|
3c598d25a6 | ||
|
|
e5aa0f938b | ||
|
|
79c46319dd | ||
|
|
c5d4dfefc0 | ||
|
|
6e853501ec | ||
|
|
fd1f3ca213 | ||
|
|
567a4ce5aa | ||
|
|
923ce84aa7 | ||
|
|
9379613132 | ||
|
|
c72a76237f | ||
|
|
f9cafcbcb0 | ||
|
|
1fce5543bc | ||
|
|
88e9e6bf55 | ||
|
|
7f0dd4b182 | ||
|
|
5557b86a54 | ||
|
|
caf4ae3a45 | ||
|
|
c88b75ca6a | ||
|
|
e409a85a28 | ||
|
|
40634d441a | ||
|
|
1d2a503ab8 | ||
|
|
b924c61440 | ||
|
|
efa10c8ef8 | ||
|
|
0a6c67ce6a | ||
|
|
ed771f2d2b | ||
|
|
63ba12d8e0 | ||
|
|
f785cf029b | ||
|
|
be7cd0756f | ||
|
|
51c6899850 | ||
|
|
163d6fe8ef | ||
|
|
7cee7fbfad | ||
|
|
4799ad95d0 | ||
|
|
88065d794b | ||
|
|
b27bfa6717 | ||
|
|
5adeaf0732 | ||
|
|
f9d91e19c5 | ||
|
|
4c7afb0d6c | ||
|
|
c1ff61669d | ||
|
|
54d6808c1e | ||
|
|
78468de2e5 | ||
|
|
76572f963b | ||
|
|
c0448f27ba | ||
|
|
179aaa4007 | ||
|
|
d072d592a1 | ||
|
|
78c454c130 | ||
|
|
5199555c0d | ||
|
|
5e31cd91a7 | ||
|
|
49a1f5dd47 | ||
|
|
d0cc9b022a | ||
|
|
a91bd2737a | ||
|
|
5ad2b8ce80 | ||
|
|
b78764599b | ||
|
|
2888e34f53 | ||
|
|
dd4418a503 | ||
|
|
a976f2071b | ||
|
|
5f98975be0 | ||
|
|
0529c991ce | ||
|
|
954abcce59 | ||
|
|
6ad515d34e | ||
|
|
99348e1614 | ||
|
|
2c742cc20d | ||
|
|
02f87203f7 | ||
|
|
56163481dd | ||
|
|
6aac2eeab5 | ||
|
|
559d8a4d13 | ||
|
|
ec9e8eb71c | ||
|
|
9399df7777 | ||
|
|
5fc1104d00 | ||
|
|
6777106fbe | ||
|
|
5f5287c3b0 | ||
|
|
615f8b0d47 | ||
|
|
9a9ab65030 | ||
|
|
241b6d2355 | ||
|
|
91e09ffee5 | ||
|
|
8e4bae351e | ||
|
|
0da201c1d5 | ||
|
|
29413a22e1 | ||
|
|
ae5a574aa5 | ||
|
|
5a0e82c31c | ||
|
|
8590b421c4 |
27
.github/DISCUSSION_TEMPLATE/q-a.yml
vendored
27
.github/DISCUSSION_TEMPLATE/q-a.yml
vendored
@@ -96,22 +96,27 @@ body:
|
||||
- type: textarea
|
||||
id: system-info
|
||||
attributes:
|
||||
label: System Info
|
||||
description: |
|
||||
Please share your system info with us. Do NOT skip this step and please don't trim
|
||||
the output. Most users don't include enough information here and it makes it harder
|
||||
for us to help you.
|
||||
Please share your system info with us.
|
||||
|
||||
Run the following command in your terminal and paste the output here:
|
||||
"pip freeze | grep langchain"
|
||||
platform (windows / linux / mac)
|
||||
python version
|
||||
|
||||
OR if you're on a recent version of langchain-core you can paste the output of:
|
||||
|
||||
python -m langchain_core.sys_info
|
||||
|
||||
or if you have an existing python interpreter running:
|
||||
|
||||
from langchain_core import sys_info
|
||||
sys_info.print_sys_info()
|
||||
|
||||
alternatively, put the entire output of `pip freeze` here.
|
||||
placeholder: |
|
||||
"pip freeze | grep langchain"
|
||||
platform
|
||||
python version
|
||||
|
||||
Alternatively, if you're on a recent version of langchain-core you can paste the output of:
|
||||
|
||||
python -m langchain_core.sys_info
|
||||
|
||||
These will only surface LangChain packages, don't forget to include any other relevant
|
||||
packages you're using (if you're not sure what's relevant, you can paste the entire output of `pip freeze`).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
26
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
26
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
@@ -96,21 +96,25 @@ body:
|
||||
attributes:
|
||||
label: System Info
|
||||
description: |
|
||||
Please share your system info with us. Do NOT skip this step and please don't trim
|
||||
the output. Most users don't include enough information here and it makes it harder
|
||||
for us to help you.
|
||||
Please share your system info with us.
|
||||
|
||||
Run the following command in your terminal and paste the output here:
|
||||
"pip freeze | grep langchain"
|
||||
platform (windows / linux / mac)
|
||||
python version
|
||||
|
||||
OR if you're on a recent version of langchain-core you can paste the output of:
|
||||
|
||||
python -m langchain_core.sys_info
|
||||
|
||||
or if you have an existing python interpreter running:
|
||||
|
||||
from langchain_core import sys_info
|
||||
sys_info.print_sys_info()
|
||||
|
||||
alternatively, put the entire output of `pip freeze` here.
|
||||
placeholder: |
|
||||
"pip freeze | grep langchain"
|
||||
platform
|
||||
python version
|
||||
|
||||
Alternatively, if you're on a recent version of langchain-core you can paste the output of:
|
||||
|
||||
python -m langchain_core.sys_info
|
||||
|
||||
These will only surface LangChain packages, don't forget to include any other relevant
|
||||
packages you're using (if you're not sure what's relevant, you can paste the entire output of `pip freeze`).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
97
.github/scripts/check_diff.py
vendored
97
.github/scripts/check_diff.py
vendored
@@ -2,12 +2,10 @@ import glob
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tomllib
|
||||
from collections import defaultdict
|
||||
from typing import Dict, List, Set
|
||||
from pathlib import Path
|
||||
import tomllib
|
||||
|
||||
from get_min_versions import get_min_version_from_toml
|
||||
|
||||
|
||||
LANGCHAIN_DIRS = [
|
||||
@@ -68,9 +66,7 @@ def dependents_graph() -> dict:
|
||||
|
||||
# load extended deps from extended_testing_deps.txt
|
||||
package_path = Path(path).parent
|
||||
extended_requirement_path = (
|
||||
package_path / "extended_dependencies" / "extended_testing_deps.txt"
|
||||
)
|
||||
extended_requirement_path = package_path / "extended_testing_deps.txt"
|
||||
if extended_requirement_path.exists():
|
||||
with open(extended_requirement_path, "r") as f:
|
||||
extended_deps = f.read().splitlines()
|
||||
@@ -109,85 +105,33 @@ def add_dependents(dirs_to_eval: Set[str], dependents: dict) -> List[str]:
|
||||
|
||||
|
||||
def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
|
||||
if job == "test-pydantic":
|
||||
return _get_pydantic_test_configs(dir_)
|
||||
|
||||
if dir_ == "libs/core":
|
||||
py_versions = ["3.9", "3.10", "3.11", "3.12"]
|
||||
return [
|
||||
{"working-directory": dir_, "python-version": f"3.{v}"}
|
||||
for v in range(9, 13)
|
||||
]
|
||||
min_python = "3.9"
|
||||
max_python = "3.12"
|
||||
|
||||
# custom logic for specific directories
|
||||
elif dir_ == "libs/partners/milvus":
|
||||
if dir_ == "libs/partners/milvus":
|
||||
# milvus poetry doesn't allow 3.12 because they
|
||||
# declare deps in funny way
|
||||
py_versions = ["3.9", "3.11"]
|
||||
max_python = "3.11"
|
||||
|
||||
elif dir_ in ["libs/community", "libs/langchain"] and job == "extended-tests":
|
||||
if dir_ in ["libs/community", "libs/langchain"] and job == "extended-tests":
|
||||
# community extended test resolution in 3.12 is slow
|
||||
# even in uv
|
||||
py_versions = ["3.9", "3.11"]
|
||||
max_python = "3.11"
|
||||
|
||||
elif dir_ == "libs/community" and job == "compile-integration-tests":
|
||||
if dir_ == "libs/community" and job == "compile-integration-tests":
|
||||
# community integration deps are slow in 3.12
|
||||
py_versions = ["3.9", "3.11"]
|
||||
else:
|
||||
py_versions = ["3.9", "3.12"]
|
||||
max_python = "3.11"
|
||||
|
||||
return [{"working-directory": dir_, "python-version": py_v} for py_v in py_versions]
|
||||
|
||||
|
||||
def _get_pydantic_test_configs(
|
||||
dir_: str, *, python_version: str = "3.11"
|
||||
) -> List[Dict[str, str]]:
|
||||
with open("./libs/core/poetry.lock", "rb") as f:
|
||||
core_poetry_lock_data = tomllib.load(f)
|
||||
for package in core_poetry_lock_data["package"]:
|
||||
if package["name"] == "pydantic":
|
||||
core_max_pydantic_minor = package["version"].split(".")[1]
|
||||
break
|
||||
|
||||
with open(f"./{dir_}/poetry.lock", "rb") as f:
|
||||
dir_poetry_lock_data = tomllib.load(f)
|
||||
|
||||
for package in dir_poetry_lock_data["package"]:
|
||||
if package["name"] == "pydantic":
|
||||
dir_max_pydantic_minor = package["version"].split(".")[1]
|
||||
break
|
||||
|
||||
core_min_pydantic_version = get_min_version_from_toml(
|
||||
"./libs/core/pyproject.toml", "release", python_version, include=["pydantic"]
|
||||
)["pydantic"]
|
||||
core_min_pydantic_minor = core_min_pydantic_version.split(".")[1] if "." in core_min_pydantic_version else "0"
|
||||
dir_min_pydantic_version = (
|
||||
get_min_version_from_toml(
|
||||
f"./{dir_}/pyproject.toml", "release", python_version, include=["pydantic"]
|
||||
)
|
||||
.get("pydantic", "0.0.0")
|
||||
)
|
||||
dir_min_pydantic_minor = dir_min_pydantic_version.split(".")[1] if "." in dir_min_pydantic_version else "0"
|
||||
|
||||
custom_mins = {
|
||||
# depends on pydantic-settings 2.4 which requires pydantic 2.7
|
||||
"libs/community": 7,
|
||||
}
|
||||
|
||||
max_pydantic_minor = min(
|
||||
int(dir_max_pydantic_minor),
|
||||
int(core_max_pydantic_minor),
|
||||
)
|
||||
min_pydantic_minor = max(
|
||||
int(dir_min_pydantic_minor),
|
||||
int(core_min_pydantic_minor),
|
||||
custom_mins.get(dir_, 0),
|
||||
)
|
||||
|
||||
configs = [
|
||||
{
|
||||
"working-directory": dir_,
|
||||
"pydantic-version": f"2.{v}.0",
|
||||
"python-version": python_version,
|
||||
}
|
||||
for v in range(min_pydantic_minor, max_pydantic_minor + 1)
|
||||
return [
|
||||
{"working-directory": dir_, "python-version": min_python},
|
||||
{"working-directory": dir_, "python-version": max_python},
|
||||
]
|
||||
return configs
|
||||
|
||||
|
||||
def _get_configs_for_multi_dirs(
|
||||
@@ -198,7 +142,7 @@ def _get_configs_for_multi_dirs(
|
||||
dirs_to_run["lint"] | dirs_to_run["test"] | dirs_to_run["extended-test"],
|
||||
dependents,
|
||||
)
|
||||
elif job in ["test", "compile-integration-tests", "dependencies", "test-pydantic"]:
|
||||
elif job in ["test", "compile-integration-tests", "dependencies"]:
|
||||
dirs = add_dependents(
|
||||
dirs_to_run["test"] | dirs_to_run["extended-test"], dependents
|
||||
)
|
||||
@@ -227,7 +171,6 @@ if __name__ == "__main__":
|
||||
dirs_to_run["lint"] = all_package_dirs()
|
||||
dirs_to_run["test"] = all_package_dirs()
|
||||
dirs_to_run["extended-test"] = set(LANGCHAIN_DIRS)
|
||||
|
||||
for file in files:
|
||||
if any(
|
||||
file.startswith(dir_)
|
||||
@@ -245,7 +188,6 @@ if __name__ == "__main__":
|
||||
if any(file.startswith(dir_) for dir_ in LANGCHAIN_DIRS):
|
||||
# add that dir and all dirs after in LANGCHAIN_DIRS
|
||||
# for extended testing
|
||||
|
||||
found = False
|
||||
for dir_ in LANGCHAIN_DIRS:
|
||||
if dir_ == "libs/core" and IGNORE_CORE_DEPENDENTS:
|
||||
@@ -299,7 +241,6 @@ if __name__ == "__main__":
|
||||
"extended-tests",
|
||||
"compile-integration-tests",
|
||||
"dependencies",
|
||||
"test-pydantic",
|
||||
]
|
||||
}
|
||||
map_job_to_configs["test-doc-imports"] = (
|
||||
|
||||
54
.github/scripts/get_min_versions.py
vendored
54
.github/scripts/get_min_versions.py
vendored
@@ -1,5 +1,4 @@
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
import tomllib
|
||||
@@ -8,9 +7,6 @@ else:
|
||||
import tomli as tomllib
|
||||
|
||||
from packaging.version import parse as parse_version
|
||||
from packaging.specifiers import SpecifierSet
|
||||
from packaging.version import Version
|
||||
|
||||
import re
|
||||
|
||||
MIN_VERSION_LIBS = [
|
||||
@@ -19,16 +15,10 @@ MIN_VERSION_LIBS = [
|
||||
"langchain",
|
||||
"langchain-text-splitters",
|
||||
"SQLAlchemy",
|
||||
"pydantic",
|
||||
]
|
||||
|
||||
# some libs only get checked on release because of simultaneous changes in
|
||||
# multiple libs
|
||||
SKIP_IF_PULL_REQUEST = [
|
||||
"langchain-core",
|
||||
"langchain-text-splitters",
|
||||
"langchain",
|
||||
"langchain-community",
|
||||
]
|
||||
SKIP_IF_PULL_REQUEST = ["langchain-core"]
|
||||
|
||||
|
||||
def get_min_version(version: str) -> str:
|
||||
@@ -56,13 +46,7 @@ def get_min_version(version: str) -> str:
|
||||
raise ValueError(f"Unrecognized version format: {version}")
|
||||
|
||||
|
||||
def get_min_version_from_toml(
|
||||
toml_path: str,
|
||||
versions_for: str,
|
||||
python_version: str,
|
||||
*,
|
||||
include: Optional[list] = None,
|
||||
):
|
||||
def get_min_version_from_toml(toml_path: str, versions_for: str):
|
||||
# Parse the TOML file
|
||||
with open(toml_path, "rb") as file:
|
||||
toml_data = tomllib.load(file)
|
||||
@@ -74,26 +58,18 @@ def get_min_version_from_toml(
|
||||
min_versions = {}
|
||||
|
||||
# Iterate over the libs in MIN_VERSION_LIBS
|
||||
for lib in set(MIN_VERSION_LIBS + (include or [])):
|
||||
for lib in MIN_VERSION_LIBS:
|
||||
if versions_for == "pull_request" and lib in SKIP_IF_PULL_REQUEST:
|
||||
# some libs only get checked on release because of simultaneous
|
||||
# changes in multiple libs
|
||||
# changes
|
||||
continue
|
||||
# Check if the lib is present in the dependencies
|
||||
if lib in dependencies:
|
||||
if include and lib not in include:
|
||||
continue
|
||||
# Get the version string
|
||||
version_string = dependencies[lib]
|
||||
|
||||
if isinstance(version_string, dict):
|
||||
version_string = version_string["version"]
|
||||
if isinstance(version_string, list):
|
||||
version_string = [
|
||||
vs
|
||||
for vs in version_string
|
||||
if check_python_version(python_version, vs["python"])
|
||||
][0]["version"]
|
||||
|
||||
# Use parse_version to get the minimum supported version from version_string
|
||||
min_version = get_min_version(version_string)
|
||||
@@ -104,31 +80,13 @@ def get_min_version_from_toml(
|
||||
return min_versions
|
||||
|
||||
|
||||
def check_python_version(version_string, constraint_string):
|
||||
"""
|
||||
Check if the given Python version matches the given constraints.
|
||||
|
||||
:param version_string: A string representing the Python version (e.g. "3.8.5").
|
||||
:param constraint_string: A string representing the package's Python version constraints (e.g. ">=3.6, <4.0").
|
||||
:return: True if the version matches the constraints, False otherwise.
|
||||
"""
|
||||
try:
|
||||
version = Version(version_string)
|
||||
constraints = SpecifierSet(constraint_string)
|
||||
return version in constraints
|
||||
except Exception as e:
|
||||
print(f"Error: {e}")
|
||||
return False
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Get the TOML file path from the command line argument
|
||||
toml_file = sys.argv[1]
|
||||
versions_for = sys.argv[2]
|
||||
python_version = sys.argv[3]
|
||||
assert versions_for in ["release", "pull_request"]
|
||||
|
||||
# Call the function to get the minimum versions
|
||||
min_versions = get_min_version_from_toml(toml_file, versions_for, python_version)
|
||||
min_versions = get_min_version_from_toml(toml_file, versions_for)
|
||||
|
||||
print(" ".join([f"{lib}=={version}" for lib, version in min_versions.items()]))
|
||||
|
||||
73
.github/workflows/_extended_test.yml
vendored
73
.github/workflows/_extended_test.yml
vendored
@@ -1,73 +0,0 @@
|
||||
name: Extended tests
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
working-directory:
|
||||
required: true
|
||||
type: string
|
||||
default: "libs/community"
|
||||
python-version:
|
||||
required: true
|
||||
type: string
|
||||
description: "Python version to use"
|
||||
default: "3.11"
|
||||
extended-deps-file:
|
||||
required: true
|
||||
type: choice
|
||||
description: "File to install extended dependencies from"
|
||||
options:
|
||||
- extended_testing_deps.txt
|
||||
- pdf_loader_deps.txt
|
||||
- other_deps.txt
|
||||
|
||||
env:
|
||||
POETRY_VERSION: "1.7.1"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
runs-on: ubuntu-latest
|
||||
name: Python ${{ inputs.python-version }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
|
||||
uses: "./.github/actions/poetry_setup"
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
poetry-version: ${{ env.POETRY_VERSION }}
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
cache-key: core
|
||||
|
||||
- name: Install extended dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
poetry install --with test,test_integration
|
||||
poetry run pip install uv
|
||||
poetry run uv pip install -r extended_dependencies/${{ inputs.extended-deps-file }}
|
||||
|
||||
- name: Install deps outside pyproject
|
||||
if: ${{ startsWith(inputs.working-directory, 'libs/community/') }}
|
||||
shell: bash
|
||||
run: poetry run pip install "boto3<2" "google-cloud-aiplatform<2"
|
||||
|
||||
- name: Run extended tests
|
||||
shell: bash
|
||||
run: |
|
||||
make test
|
||||
make integration_tests
|
||||
|
||||
- name: Ensure the tests did not create any additional files
|
||||
shell: bash
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
STATUS="$(git status)"
|
||||
echo "$STATUS"
|
||||
|
||||
# grep will exit non-zero if the target message isn't found,
|
||||
# and `set -e` above will cause the step to fail.
|
||||
echo "$STATUS" | grep 'nothing to commit, working tree clean'
|
||||
65
.github/workflows/_integration_test.yml
vendored
65
.github/workflows/_integration_test.yml
vendored
@@ -1,6 +1,3 @@
|
||||
# Ignore changes to this file. Hijacking just to allow
|
||||
# testing of workflow dispatch on new workflow off of branch.
|
||||
|
||||
name: Integration tests
|
||||
|
||||
on:
|
||||
@@ -9,20 +6,10 @@ on:
|
||||
working-directory:
|
||||
required: true
|
||||
type: string
|
||||
default: "libs/community"
|
||||
python-version:
|
||||
required: true
|
||||
type: string
|
||||
description: "Python version to use"
|
||||
default: "3.11"
|
||||
extended-deps-file:
|
||||
required: true
|
||||
type: choice
|
||||
description: "File to install extended dependencies from"
|
||||
options:
|
||||
- extended_testing_deps.txt
|
||||
- pdf_loader_deps.txt
|
||||
- other_deps.txt
|
||||
|
||||
env:
|
||||
POETRY_VERSION: "1.7.1"
|
||||
@@ -45,22 +32,60 @@ jobs:
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
cache-key: core
|
||||
|
||||
- name: Install extended dependencies
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
poetry install --with test,test_integration
|
||||
poetry run pip install uv
|
||||
poetry run uv pip install -r extended_dependencies/${{ inputs.extended-deps-file }}
|
||||
run: poetry install --with test,test_integration
|
||||
|
||||
- name: Install deps outside pyproject
|
||||
if: ${{ startsWith(inputs.working-directory, 'libs/community/') }}
|
||||
shell: bash
|
||||
run: poetry run pip install "boto3<2" "google-cloud-aiplatform<2"
|
||||
|
||||
- name: Run extended tests
|
||||
- name: 'Authenticate to Google Cloud'
|
||||
id: 'auth'
|
||||
uses: google-github-actions/auth@v2
|
||||
with:
|
||||
credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}'
|
||||
|
||||
- name: Run integration tests
|
||||
shell: bash
|
||||
env:
|
||||
AI21_API_KEY: ${{ secrets.AI21_API_KEY }}
|
||||
FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
|
||||
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
|
||||
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}
|
||||
AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }}
|
||||
AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
|
||||
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
|
||||
AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }}
|
||||
AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }}
|
||||
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
|
||||
TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
|
||||
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
|
||||
NVIDIA_API_KEY: ${{ secrets.NVIDIA_API_KEY }}
|
||||
GOOGLE_SEARCH_API_KEY: ${{ secrets.GOOGLE_SEARCH_API_KEY }}
|
||||
GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }}
|
||||
HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }}
|
||||
EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
|
||||
NOMIC_API_KEY: ${{ secrets.NOMIC_API_KEY }}
|
||||
WATSONX_APIKEY: ${{ secrets.WATSONX_APIKEY }}
|
||||
WATSONX_PROJECT_ID: ${{ secrets.WATSONX_PROJECT_ID }}
|
||||
PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }}
|
||||
PINECONE_ENVIRONMENT: ${{ secrets.PINECONE_ENVIRONMENT }}
|
||||
ASTRA_DB_API_ENDPOINT: ${{ secrets.ASTRA_DB_API_ENDPOINT }}
|
||||
ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.ASTRA_DB_APPLICATION_TOKEN }}
|
||||
ASTRA_DB_KEYSPACE: ${{ secrets.ASTRA_DB_KEYSPACE }}
|
||||
ES_URL: ${{ secrets.ES_URL }}
|
||||
ES_CLOUD_ID: ${{ secrets.ES_CLOUD_ID }}
|
||||
ES_API_KEY: ${{ secrets.ES_API_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # for airbyte
|
||||
MONGODB_ATLAS_URI: ${{ secrets.MONGODB_ATLAS_URI }}
|
||||
VOYAGE_API_KEY: ${{ secrets.VOYAGE_API_KEY }}
|
||||
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
|
||||
UPSTAGE_API_KEY: ${{ secrets.UPSTAGE_API_KEY }}
|
||||
run: |
|
||||
make test
|
||||
make integration_tests
|
||||
|
||||
- name: Ensure the tests did not create any additional files
|
||||
|
||||
4
.github/workflows/_release.yml
vendored
4
.github/workflows/_release.yml
vendored
@@ -164,7 +164,6 @@ jobs:
|
||||
|
||||
- name: Set up Python + Poetry ${{ env.POETRY_VERSION }}
|
||||
uses: "./.github/actions/poetry_setup"
|
||||
id: setup-python
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
poetry-version: ${{ env.POETRY_VERSION }}
|
||||
@@ -232,8 +231,7 @@ jobs:
|
||||
id: min-version
|
||||
run: |
|
||||
poetry run pip install packaging
|
||||
python_version="$(poetry run python --version | awk '{print $2}')"
|
||||
min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
|
||||
min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release)"
|
||||
echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
|
||||
echo "min-versions=$min_versions"
|
||||
|
||||
|
||||
39
.github/workflows/_test.yml
vendored
39
.github/workflows/_test.yml
vendored
@@ -27,12 +27,12 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
|
||||
uses: "./.github/actions/poetry_setup"
|
||||
id: setup-python
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
poetry-version: ${{ env.POETRY_VERSION }}
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
cache-key: core
|
||||
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: poetry install --with test
|
||||
@@ -42,26 +42,6 @@ jobs:
|
||||
run: |
|
||||
make test
|
||||
|
||||
- name: Get minimum versions
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
id: min-version
|
||||
shell: bash
|
||||
run: |
|
||||
poetry run pip install packaging tomli
|
||||
python_version="$(poetry run python --version | awk '{print $2}')"
|
||||
min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request $python_version)"
|
||||
echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
|
||||
echo "min-versions=$min_versions"
|
||||
|
||||
- name: Run unit tests with minimum dependency versions
|
||||
if: ${{ steps.min-version.outputs.min-versions != '' }}
|
||||
env:
|
||||
MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
|
||||
run: |
|
||||
poetry run pip install $MIN_VERSIONS
|
||||
make tests
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
|
||||
- name: Ensure the tests did not create any additional files
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -74,3 +54,20 @@ jobs:
|
||||
# and `set -e` above will cause the step to fail.
|
||||
echo "$STATUS" | grep 'nothing to commit, working tree clean'
|
||||
|
||||
- name: Get minimum versions
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
id: min-version
|
||||
run: |
|
||||
poetry run pip install packaging tomli
|
||||
min_versions="$(poetry run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml pull_request)"
|
||||
echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
|
||||
echo "min-versions=$min_versions"
|
||||
|
||||
- name: Run unit tests with minimum dependency versions
|
||||
if: ${{ steps.min-version.outputs.min-versions != '' }}
|
||||
env:
|
||||
MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
|
||||
run: |
|
||||
poetry run pip install --force-reinstall $MIN_VERSIONS --editable .
|
||||
make tests
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
|
||||
64
.github/workflows/_test_pydantic.yml
vendored
64
.github/workflows/_test_pydantic.yml
vendored
@@ -1,64 +0,0 @@
|
||||
name: test pydantic intermediate versions
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
working-directory:
|
||||
required: true
|
||||
type: string
|
||||
description: "From which folder this pipeline executes"
|
||||
python-version:
|
||||
required: false
|
||||
type: string
|
||||
description: "Python version to use"
|
||||
default: "3.11"
|
||||
pydantic-version:
|
||||
required: true
|
||||
type: string
|
||||
description: "Pydantic version to test."
|
||||
|
||||
env:
|
||||
POETRY_VERSION: "1.7.1"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
runs-on: ubuntu-latest
|
||||
name: "make test # pydantic: ~=${{ inputs.pydantic-version }}, python: ${{ inputs.python-version }}, "
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }}
|
||||
uses: "./.github/actions/poetry_setup"
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
poetry-version: ${{ env.POETRY_VERSION }}
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
cache-key: core
|
||||
|
||||
- name: Install dependencies
|
||||
shell: bash
|
||||
run: poetry install --with test
|
||||
|
||||
- name: Overwrite pydantic version
|
||||
shell: bash
|
||||
run: poetry run pip install pydantic~=${{ inputs.pydantic-version }}
|
||||
|
||||
- name: Run core tests
|
||||
shell: bash
|
||||
run: |
|
||||
make test
|
||||
|
||||
- name: Ensure the tests did not create any additional files
|
||||
shell: bash
|
||||
run: |
|
||||
set -eu
|
||||
|
||||
STATUS="$(git status)"
|
||||
echo "$STATUS"
|
||||
|
||||
# grep will exit non-zero if the target message isn't found,
|
||||
# and `set -e` above will cause the step to fail.
|
||||
echo "$STATUS" | grep 'nothing to commit, working tree clean'
|
||||
20
.github/workflows/check_diffs.yml
vendored
20
.github/workflows/check_diffs.yml
vendored
@@ -31,7 +31,6 @@ jobs:
|
||||
uses: Ana06/get-changed-files@v2.2.0
|
||||
- id: set-matrix
|
||||
run: |
|
||||
python -m pip install packaging
|
||||
python .github/scripts/check_diff.py ${{ steps.files.outputs.all }} >> $GITHUB_OUTPUT
|
||||
outputs:
|
||||
lint: ${{ steps.set-matrix.outputs.lint }}
|
||||
@@ -40,7 +39,6 @@ jobs:
|
||||
compile-integration-tests: ${{ steps.set-matrix.outputs.compile-integration-tests }}
|
||||
dependencies: ${{ steps.set-matrix.outputs.dependencies }}
|
||||
test-doc-imports: ${{ steps.set-matrix.outputs.test-doc-imports }}
|
||||
test-pydantic: ${{ steps.set-matrix.outputs.test-pydantic }}
|
||||
lint:
|
||||
name: cd ${{ matrix.job-configs.working-directory }}
|
||||
needs: [ build ]
|
||||
@@ -69,20 +67,6 @@ jobs:
|
||||
python-version: ${{ matrix.job-configs.python-version }}
|
||||
secrets: inherit
|
||||
|
||||
test-pydantic:
|
||||
name: cd ${{ matrix.job-configs.working-directory }}
|
||||
needs: [ build ]
|
||||
if: ${{ needs.build.outputs.test-pydantic != '[]' }}
|
||||
strategy:
|
||||
matrix:
|
||||
job-configs: ${{ fromJson(needs.build.outputs.test-pydantic) }}
|
||||
fail-fast: false
|
||||
uses: ./.github/workflows/_test_pydantic.yml
|
||||
with:
|
||||
working-directory: ${{ matrix.job-configs.working-directory }}
|
||||
pydantic-version: ${{ matrix.job-configs.pydantic-version }}
|
||||
secrets: inherit
|
||||
|
||||
test-doc-imports:
|
||||
needs: [ build ]
|
||||
if: ${{ needs.build.outputs.test-doc-imports != '[]' }}
|
||||
@@ -139,7 +123,7 @@ jobs:
|
||||
echo "Running extended tests, installing dependencies with poetry..."
|
||||
poetry install --with test
|
||||
poetry run pip install uv
|
||||
poetry run uv pip install -r extended_dependencies/extended_testing_deps.txt
|
||||
poetry run uv pip install -r extended_testing_deps.txt
|
||||
|
||||
- name: Run extended tests
|
||||
run: make extended_tests
|
||||
@@ -157,7 +141,7 @@ jobs:
|
||||
echo "$STATUS" | grep 'nothing to commit, working tree clean'
|
||||
ci_success:
|
||||
name: "CI Success"
|
||||
needs: [build, lint, test, compile-integration-tests, extended-tests, test-doc-imports, test-pydantic]
|
||||
needs: [build, lint, test, compile-integration-tests, extended-tests, test-doc-imports]
|
||||
if: |
|
||||
always()
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
2
.github/workflows/codespell.yml
vendored
2
.github/workflows/codespell.yml
vendored
@@ -3,7 +3,7 @@ name: CI / cd . / make spell_check
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master, v0.1, v0.2]
|
||||
branches: [master, v0.1]
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
|
||||
@@ -39,7 +39,7 @@ conda install langchain -c conda-forge
|
||||
For these applications, LangChain simplifies the entire application lifecycle:
|
||||
|
||||
- **Open-source libraries**: Build your applications using LangChain's open-source [building blocks](https://python.langchain.com/v0.2/docs/concepts#langchain-expression-language-lcel), [components](https://python.langchain.com/v0.2/docs/concepts), and [third-party integrations](https://python.langchain.com/v0.2/docs/integrations/platforms/).
|
||||
Use [LangGraph](https://langchain-ai.github.io/langgraph/) to build stateful agents with first-class streaming and human-in-the-loop support.
|
||||
Use [LangGraph](/docs/concepts/#langgraph) to build stateful agents with first-class streaming and human-in-the-loop support.
|
||||
- **Productionization**: Inspect, monitor, and evaluate your apps with [LangSmith](https://docs.smith.langchain.com/) so that you can constantly optimize and deploy with confidence.
|
||||
- **Deployment**: Turn your LangGraph applications into production-ready APIs and Assistants with [LangGraph Cloud](https://langchain-ai.github.io/langgraph/cloud/).
|
||||
|
||||
@@ -49,7 +49,7 @@ For these applications, LangChain simplifies the entire application lifecycle:
|
||||
- **`langchain-community`**: Third party integrations.
|
||||
- Some integrations have been further split into **partner packages** that only rely on **`langchain-core`**. Examples include **`langchain_openai`** and **`langchain_anthropic`**.
|
||||
- **`langchain`**: Chains, agents, and retrieval strategies that make up an application's cognitive architecture.
|
||||
- **[`LangGraph`](https://langchain-ai.github.io/langgraph/)**: A library for building robust and stateful multi-actor applications with LLMs by modeling steps as edges and nodes in a graph. Integrates smoothly with LangChain, but can be used without it. To learn more about LangGraph, check out our first LangChain Academy course, *Introduction to LangGraph*, available [here](https://academy.langchain.com/courses/intro-to-langgraph).
|
||||
- **[`LangGraph`](https://langchain-ai.github.io/langgraph/)**: A library for building robust and stateful multi-actor applications with LLMs by modeling steps as edges and nodes in a graph. Integrates smoothly with LangChain, but can be used without it.
|
||||
|
||||
### Productionization:
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
:member-order: groupwise
|
||||
:show-inheritance: True
|
||||
:special-members: __call__
|
||||
:exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, assign, as_tool, get_config_jsonschema, get_input_jsonschema, get_output_jsonschema, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, to_json, model_extra, model_fields_set, model_json_schema, predict, apredict, predict_messages, apredict_messages, generate, generate_prompt, agenerate, agenerate_prompt, call_as_llm
|
||||
:exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, astream_log, transform, atransform, get_output_schema, get_prompts, config_schema, map, pick, pipe, with_listeners, with_alisteners, with_config, with_fallbacks, with_types, with_retry, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, bind, assign, as_tool, get_config_jsonschema, get_input_jsonschema, get_output_jsonschema, model_construct, model_copy, model_dump, model_dump_json, model_parametrized_name, model_post_init, model_rebuild, model_validate, model_validate_json, model_validate_strings, to_json, model_extra, model_fields_set, model_json_schema
|
||||
|
||||
.. NOTE:: {{objname}} implements the standard :py:class:`Runnable Interface <langchain_core.runnables.base.Runnable>`. 🏃
|
||||
|
||||
|
||||
@@ -595,10 +595,10 @@ tool_call = ai_msg.tool_calls[0]
|
||||
# -> ToolCall(args={...}, id=..., ...)
|
||||
tool_message = tool.invoke(tool_call)
|
||||
# -> ToolMessage(
|
||||
# content="tool result foobar...",
|
||||
# tool_call_id=...,
|
||||
# name="tool_name"
|
||||
# )
|
||||
content="tool result foobar...",
|
||||
tool_call_id=...,
|
||||
name="tool_name"
|
||||
)
|
||||
```
|
||||
|
||||
If you are invoking the tool this way and want to include an [artifact](/docs/concepts/#toolmessage) for the ToolMessage, you will need to have the tool return two things.
|
||||
@@ -717,6 +717,8 @@ During run-time LangChain configures an appropriate callback manager (e.g., [Cal
|
||||
|
||||
The `callbacks` property is available on most objects throughout the API (Models, Tools, Agents, etc.) in two different places:
|
||||
|
||||
The callbacks are available on most objects throughout the API (Models, Tools, Agents, etc.) in two different places:
|
||||
|
||||
- **Request time callbacks**: Passed at the time of the request in addition to the input data.
|
||||
Available on all standard `Runnable` objects. These callbacks are INHERITED by all children
|
||||
of the object they are defined on. For example, `chain.invoke({"number": 25}, {"callbacks": [handler]})`.
|
||||
|
||||
@@ -206,7 +206,7 @@
|
||||
" ) -> List[Document]:\n",
|
||||
" \"\"\"Get docs, adding score information.\"\"\"\n",
|
||||
" docs, scores = zip(\n",
|
||||
" *self.vectorstore.similarity_search_with_score(query, **search_kwargs)\n",
|
||||
" *vectorstore.similarity_search_with_score(query, **search_kwargs)\n",
|
||||
" )\n",
|
||||
" for doc, score in zip(docs, scores):\n",
|
||||
" doc.metadata[\"score\"] = score\n",
|
||||
|
||||
@@ -15,15 +15,43 @@
|
||||
"\n",
|
||||
"Make sure you have the integration packages installed for any model providers you want to support. E.g. you should have `langchain-openai` installed to init an OpenAI model.\n",
|
||||
"\n",
|
||||
":::\n",
|
||||
"\n",
|
||||
":::info Requires ``langchain >= 0.2.8``\n",
|
||||
"\n",
|
||||
"This functionality was added in ``langchain-core == 0.2.8``. Please make sure your package is up to date.\n",
|
||||
"\n",
|
||||
":::"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 1,
|
||||
"id": "165b0de6-9ae3-4e3d-aa98-4fc8a97c4a06",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"metadata": {
|
||||
"execution": {
|
||||
"iopub.execute_input": "2024-09-10T20:22:32.858670Z",
|
||||
"iopub.status.busy": "2024-09-10T20:22:32.858278Z",
|
||||
"iopub.status.idle": "2024-09-10T20:22:33.009452Z",
|
||||
"shell.execute_reply": "2024-09-10T20:22:33.007022Z"
|
||||
}
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"zsh:1: 0.2.8 not found\r\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Note: you may need to restart the kernel to use updated packages.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"%pip install -qU langchain>=0.2.8 langchain-openai langchain-anthropic langchain-google-vertexai"
|
||||
]
|
||||
|
||||
@@ -1,9 +1,178 @@
|
||||
# How to use LangChain with different Pydantic versions
|
||||
|
||||
As of the `0.3` release, LangChain uses Pydantic 2 internally.
|
||||
- Pydantic v2 was released in June, 2023 (https://docs.pydantic.dev/2.0/blog/pydantic-v2-final/).
|
||||
- v2 contains has a number of breaking changes (https://docs.pydantic.dev/2.0/migration/).
|
||||
- Pydantic 1 End of Life was in June 2024. LangChain will be dropping support for Pydantic 1 in the near future,
|
||||
and likely migrating internally to Pydantic 2. The timeline is tentatively September. This change will be accompanied by a minor version bump in the main langchain packages to version 0.3.x.
|
||||
|
||||
Users should install Pydantic 2 and are advised to **avoid** using the `pydantic.v1` namespace of Pydantic 2 with
|
||||
LangChain APIs.
|
||||
As of `langchain>=0.0.267`, LangChain allows users to install either Pydantic V1 or V2.
|
||||
|
||||
If you're working with prior versions of LangChain, please see the following guide
|
||||
on [Pydantic compatibility](https://python.langchain.com/v0.2/docs/how_to/pydantic_compatibility).
|
||||
Internally, LangChain continues to use the [Pydantic V1](https://docs.pydantic.dev/latest/migration/#continue-using-pydantic-v1-features) via
|
||||
the v1 namespace of Pydantic 2.
|
||||
|
||||
Because Pydantic does not support mixing .v1 and .v2 objects, users should be aware of a number of issues
|
||||
when using LangChain with Pydantic.
|
||||
|
||||
:::caution
|
||||
While LangChain supports Pydantic V2 objects in some APIs (listed below), it's suggested that users keep using Pydantic V1 objects until LangChain 0.3 is released.
|
||||
:::
|
||||
|
||||
|
||||
## 1. Passing Pydantic objects to LangChain APIs
|
||||
|
||||
Most LangChain APIs for *tool usage* (see list below) have been updated to accept either Pydantic v1 or v2 objects.
|
||||
|
||||
* Pydantic v1 objects correspond to subclasses of `pydantic.BaseModel` if `pydantic 1` is installed or subclasses of `pydantic.v1.BaseModel` if `pydantic 2` is installed.
|
||||
* Pydantic v2 objects correspond to subclasses of `pydantic.BaseModel` if `pydantic 2` is installed.
|
||||
|
||||
|
||||
| API | Pydantic 1 | Pydantic 2 |
|
||||
|----------------------------------------|------------|----------------------------------------------------------------|
|
||||
| `BaseChatModel.bind_tools` | Yes | langchain-core>=0.2.23, appropriate version of partner package |
|
||||
| `BaseChatModel.with_structured_output` | Yes | langchain-core>=0.2.23, appropriate version of partner package |
|
||||
| `Tool.from_function` | Yes | langchain-core>=0.2.23 |
|
||||
| `StructuredTool.from_function` | Yes | langchain-core>=0.2.23 |
|
||||
|
||||
|
||||
Partner packages that accept pydantic v2 objects via `bind_tools` or `with_structured_output` APIs:
|
||||
|
||||
| Package Name | pydantic v1 | pydantic v2 |
|
||||
|---------------------|-------------|-------------|
|
||||
| langchain-mistralai | Yes | >=0.1.11 |
|
||||
| langchain-anthropic | Yes | >=0.1.21 |
|
||||
| langchain-robocorp | Yes | >=0.0.10 |
|
||||
| langchain-openai | Yes | >=0.1.19 |
|
||||
| langchain-fireworks | Yes | >=0.1.5 |
|
||||
| langchain-aws | Yes | >=0.1.15 |
|
||||
|
||||
Additional partner packages will be updated to accept Pydantic v2 objects in the future.
|
||||
|
||||
If you are still seeing issues with these APIs or other APIs that accept Pydantic objects, please open an issue, and we'll
|
||||
address it.
|
||||
|
||||
Example:
|
||||
|
||||
Prior to `langchain-core<0.2.23`, use Pydantic v1 objects when passing to LangChain APIs.
|
||||
|
||||
|
||||
```python
|
||||
from langchain_openai import ChatOpenAI
|
||||
from pydantic.v1 import BaseModel # <-- Note v1 namespace
|
||||
|
||||
class Person(BaseModel):
|
||||
"""Personal information"""
|
||||
name: str
|
||||
|
||||
model = ChatOpenAI()
|
||||
model = model.with_structured_output(Person)
|
||||
|
||||
model.invoke('Bob is a person.')
|
||||
```
|
||||
|
||||
After `langchain-core>=0.2.23`, use either Pydantic v1 or v2 objects when passing to LangChain APIs.
|
||||
|
||||
```python
|
||||
from langchain_openai import ChatOpenAI
|
||||
from pydantic import BaseModel
|
||||
|
||||
class Person(BaseModel):
|
||||
"""Personal information"""
|
||||
name: str
|
||||
|
||||
|
||||
model = ChatOpenAI()
|
||||
model = model.with_structured_output(Person)
|
||||
|
||||
model.invoke('Bob is a person.')
|
||||
```
|
||||
|
||||
## 2. Sub-classing LangChain models
|
||||
|
||||
Because LangChain internally uses Pydantic v1, if you are sub-classing LangChain models, you should use Pydantic v1
|
||||
primitives.
|
||||
|
||||
|
||||
**Example 1: Extending via inheritance**
|
||||
|
||||
**YES**
|
||||
|
||||
```python
|
||||
from pydantic.v1 import validator
|
||||
from langchain_core.tools import BaseTool
|
||||
|
||||
class CustomTool(BaseTool): # BaseTool is v1 code
|
||||
x: int = Field(default=1)
|
||||
|
||||
def _run(*args, **kwargs):
|
||||
return "hello"
|
||||
|
||||
@validator('x') # v1 code
|
||||
@classmethod
|
||||
def validate_x(cls, x: int) -> int:
|
||||
return 1
|
||||
|
||||
|
||||
CustomTool(
|
||||
name='custom_tool',
|
||||
description="hello",
|
||||
x=1,
|
||||
)
|
||||
```
|
||||
|
||||
Mixing Pydantic v2 primitives with Pydantic v1 primitives can raise cryptic errors
|
||||
|
||||
**NO**
|
||||
|
||||
```python
|
||||
from pydantic import Field, field_validator # pydantic v2
|
||||
from langchain_core.tools import BaseTool
|
||||
|
||||
class CustomTool(BaseTool): # BaseTool is v1 code
|
||||
x: int = Field(default=1)
|
||||
|
||||
def _run(*args, **kwargs):
|
||||
return "hello"
|
||||
|
||||
@field_validator('x') # v2 code
|
||||
@classmethod
|
||||
def validate_x(cls, x: int) -> int:
|
||||
return 1
|
||||
|
||||
|
||||
CustomTool(
|
||||
name='custom_tool',
|
||||
description="hello",
|
||||
x=1,
|
||||
)
|
||||
```
|
||||
|
||||
|
||||
## 3. Disable run-time validation for LangChain objects used inside Pydantic v2 models
|
||||
|
||||
e.g.,
|
||||
|
||||
```python
|
||||
from typing import Annotated
|
||||
|
||||
from langchain_openai import ChatOpenAI # <-- ChatOpenAI uses pydantic v1
|
||||
from pydantic import BaseModel, SkipValidation
|
||||
|
||||
|
||||
class Foo(BaseModel): # <-- BaseModel is from Pydantic v2
|
||||
model: Annotated[ChatOpenAI, SkipValidation()]
|
||||
|
||||
Foo(model=ChatOpenAI(api_key="hello"))
|
||||
```
|
||||
|
||||
## 4: LangServe cannot generate OpenAPI docs if running Pydantic 2
|
||||
|
||||
If you are using Pydantic 2, you will not be able to generate OpenAPI docs using LangServe.
|
||||
|
||||
If you need OpenAPI docs, your options are to either install Pydantic 1:
|
||||
|
||||
`pip install pydantic==1.10.17`
|
||||
|
||||
or else to use the `APIHandler` object in LangChain to manually create the
|
||||
routes for your API.
|
||||
|
||||
See: https://python.langchain.com/docs/langserve/#pydantic
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
"\n",
|
||||
"::: {.callout-warning}\n",
|
||||
"\n",
|
||||
"The Anthropic API officially supports tool-calling so this workaround is no longer needed. Please use [ChatAnthropic](/docs/integrations/chat/anthropic) with `langchain-anthropic>=0.1.15`.\n",
|
||||
"The Anthropic API officially supports tool-calling so this workaround is no longer needed. Please use [ChatAnthropic](/docs/integrations/chat/anthropic) with `langchain-anthropic>=0.1.5`.\n",
|
||||
"\n",
|
||||
":::\n",
|
||||
"\n",
|
||||
|
||||
@@ -61,7 +61,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install -qU firecrawl-py==0.0.20 langchain_community"
|
||||
"%pip install -qU firecrawl-py langchain_community"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
Install the python SDK:
|
||||
|
||||
```bash
|
||||
pip install firecrawl-py==0.0.20
|
||||
pip install firecrawl-py
|
||||
```
|
||||
|
||||
## Document loader
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install --upgrade --quiet langchain-google-community"
|
||||
"%pip install --upgrade --quiet langchain_google_community"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -99,7 +99,7 @@
|
||||
"vector_store = Chroma(\n",
|
||||
" collection_name=\"example_collection\",\n",
|
||||
" embedding_function=embeddings,\n",
|
||||
" persist_directory=\"./chroma_langchain_db\", # Where to save data locally, remove if not necessary\n",
|
||||
" persist_directory=\"./chroma_langchain_db\", # Where to save data locally, remove if not neccesary\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
@@ -179,7 +179,7 @@
|
||||
"from langchain_core.documents import Document\n",
|
||||
"\n",
|
||||
"document_1 = Document(\n",
|
||||
" page_content=\"I had chocolate chip pancakes and scrambled eggs for breakfast this morning.\",\n",
|
||||
" page_content=\"I had chocalate chip pancakes and scrambled eggs for breakfast this morning.\",\n",
|
||||
" metadata={\"source\": \"tweet\"},\n",
|
||||
" id=1,\n",
|
||||
")\n",
|
||||
@@ -273,7 +273,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"updated_document_1 = Document(\n",
|
||||
" page_content=\"I had chocolate chip pancakes and fried eggs for breakfast this morning.\",\n",
|
||||
" page_content=\"I had chocalate chip pancakes and fried eggs for breakfast this morning.\",\n",
|
||||
" metadata={\"source\": \"tweet\"},\n",
|
||||
" id=1,\n",
|
||||
")\n",
|
||||
@@ -287,7 +287,7 @@
|
||||
"vector_store.update_document(document_id=uuids[0], document=updated_document_1)\n",
|
||||
"# You can also update multiple documents at once\n",
|
||||
"vector_store.update_documents(\n",
|
||||
" ids=uuids[:2], documents=[updated_document_1, updated_document_2]\n",
|
||||
" ids=uuids[:2], documents=[updated_document_1, updated_document_1]\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -55,7 +55,7 @@ These are the best ones to get started with:
|
||||
- [Build an Agent](/docs/tutorials/agents)
|
||||
- [Introduction to LangGraph](https://langchain-ai.github.io/langgraph/tutorials/introduction/)
|
||||
|
||||
Explore the full list of LangChain tutorials [here](/docs/tutorials), and check out other [LangGraph tutorials here](https://langchain-ai.github.io/langgraph/tutorials/). To learn more about LangGraph, check out our first LangChain Academy course, *Introduction to LangGraph*, available [here](https://academy.langchain.com/courses/intro-to-langgraph).
|
||||
Explore the full list of LangChain tutorials [here](/docs/tutorials), and check out other [LangGraph tutorials here](https://langchain-ai.github.io/langgraph/tutorials/).
|
||||
|
||||
|
||||
## [How-to guides](/docs/how_to)
|
||||
@@ -85,8 +85,8 @@ Build stateful, multi-actor applications with LLMs. Integrates smoothly with Lan
|
||||
|
||||
## Additional resources
|
||||
|
||||
### [Versions](/docs/versions/v0_3/)
|
||||
See what changed in v0.3, learn how to migrate legacy code, read up on our versioning policies, and more.
|
||||
### [Versions](/docs/versions/overview/)
|
||||
See what changed in v0.2, learn how to migrate legacy code, and read up on our release/versioning policies, and more.
|
||||
|
||||
### [Security](/docs/security)
|
||||
Read up on [security](/docs/security) best practices to make sure you're developing safely with LangChain.
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
---
|
||||
sidebar_position: 0
|
||||
sidebar_label: Overview of v0.2
|
||||
---
|
||||
|
||||
# Overview
|
||||
# Overview of LangChain v0.2
|
||||
|
||||
## What’s new in LangChain?
|
||||
|
||||
@@ -8,7 +8,7 @@ keywords: [retrievalqa, llmchain, conversationalretrievalchain]
|
||||
|
||||
This code contains a list of deprecations and removals in the `langchain` and `langchain-core` packages.
|
||||
|
||||
New features and improvements are not listed here. See the [overview](/docs/versions/v0_2/overview/) for a summary of what's new in this release.
|
||||
New features and improvements are not listed here. See the [overview](/docs/versions/overview/) for a summary of what's new in this release.
|
||||
|
||||
## Breaking changes
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Migration
|
||||
# Migrating to LangChain v0.2
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ sidebar_position: 2
|
||||
sidebar_label: astream_events v2
|
||||
---
|
||||
|
||||
# Migrating to astream_events(..., version="v2")
|
||||
# Migrating to Astream Events v2
|
||||
|
||||
We've added a `v2` of the astream_events API with the release of `0.2.x`. You can see this [PR](https://github.com/langchain-ai/langchain/pull/21638) for more details.
|
||||
|
||||
|
||||
@@ -1,271 +0,0 @@
|
||||
# LangChain v0.3
|
||||
|
||||
*Last updated: 09.16.24*
|
||||
|
||||
## What's changed
|
||||
|
||||
* All packages have been upgraded from Pydantic 1 to Pydantic 2 internally. Use of Pydantic 2 in user code is fully supported with all packages without the need for bridges like `langchain_core.pydantic_v1` or `pydantic.v1`.
|
||||
* Pydantic 1 will no longer be supported as it reached its end-of-life in June 2024.
|
||||
* Python 3.8 will no longer be supported as its end-of-life is October 2024.
|
||||
|
||||
**These are the only breaking changes.**
|
||||
|
||||
## What’s new
|
||||
|
||||
The following features have been added during the development of 0.2.x:
|
||||
|
||||
- Moved more integrations from `langchain-community` to their own `langchain-x` packages. This is a non-breaking change, as the legacy implementations are left in `langchain-community` and marked as deprecated. This allows us to better manage the dependencies of, test, and version these integrations. You can see all the latest integration packages in the [API reference](https://python.langchain.com/v0.2/api_reference/reference.html#integrations).
|
||||
- Simplified tool definition and usage. Read more [here](https://blog.langchain.dev/improving-core-tool-interfaces-and-docs-in-langchain/).
|
||||
- Added utilities for interacting with chat models: [universal model constructor](https://python.langchain.com/v0.2/docs/how_to/chat_models_universal_init/), [rate limiter](https://python.langchain.com/v0.2/docs/how_to/chat_model_rate_limiting/), [message utilities](https://python.langchain.com/v0.2/docs/how_to/#messages),
|
||||
- Added the ability to [dispatch custom events](https://python.langchain.com/v0.2/docs/how_to/callbacks_custom_events/).
|
||||
- Revamped integration docs and API reference. Read more [here](https://blog.langchain.dev/langchain-integration-docs-revamped/).
|
||||
- Marked as deprecated a number of legacy chains and added migration guides for all of them. These are slated for removal in `langchain` 1.0.0. See the deprecated chains and associated [migration guides here](https://python.langchain.com/v0.2/docs/versions/migrating_chains/).
|
||||
|
||||
## How to update your code
|
||||
|
||||
If you're using `langchain` / `langchain-community` / `langchain-core` 0.0 or 0.1, we recommend that you first [upgrade to 0.2](https://python.langchain.com/v0.2/docs/versions/v0_2/).
|
||||
|
||||
If you're using `langgraph`, upgrade to `langgraph>=0.2.20,<0.3`. This will work with either 0.2 or 0.3 versions of all the base packages.
|
||||
|
||||
Here is a complete list of all packages that have been released and what we recommend upgrading your version constraints to.
|
||||
Any package that now requires `langchain-core` 0.3 had a minor version bump.
|
||||
Any package that is now compatible with both `langchain-core` 0.2 and 0.3 had a patch version bump.
|
||||
|
||||
You can use the `langchain-cli` to update deprecated imports automatically.
|
||||
The CLI will handle updating deprecated imports that were introduced in LangChain 0.0.x and LangChain 0.1, as
|
||||
well as updating the `langchain_core.pydantic_v1` and `langchain.pydantic_v1` imports.
|
||||
|
||||
|
||||

### Base packages

| Package                  | Latest | Recommended constraint |
|--------------------------|--------|------------------------|
| langchain                | 0.3.0  | >=0.3,<0.4             |
| langchain-community      | 0.3.0  | >=0.3,<0.4             |
| langchain-text-splitters | 0.3.0  | >=0.3,<0.4             |
| langchain-core           | 0.3.0  | >=0.3,<0.4             |
| langchain-experimental   | 0.3.0  | >=0.3,<0.4             |

### Downstream packages

| Package   | Latest | Recommended constraint |
|-----------|--------|------------------------|
| langgraph | 0.2.20 | >=0.2.20,<0.3          |
| langserve | 0.3.0  | >=0.3,<0.4             |

### Integration packages

| Package                          | Latest | Recommended constraint |
|----------------------------------|--------|------------------------|
| langchain-ai21                   | 0.2.0  | >=0.2,<0.3             |
| langchain-aws                    | 0.2.0  | >=0.2,<0.3             |
| langchain-anthropic              | 0.2.0  | >=0.2,<0.3             |
| langchain-astradb                | 0.4.1  | >=0.4.1,<0.5           |
| langchain-azure-dynamic-sessions | 0.2.0  | >=0.2,<0.3             |
| langchain-box                    | 0.2.0  | >=0.2,<0.3             |
| langchain-chroma                 | 0.1.4  | >=0.1.4,<0.2           |
| langchain-cohere                 | 0.3.0  | >=0.3,<0.4             |
| langchain-elasticsearch          | 0.3.0  | >=0.3,<0.4             |
| langchain-exa                    | 0.2.0  | >=0.2,<0.3             |
| langchain-fireworks              | 0.2.0  | >=0.2,<0.3             |
| langchain-groq                   | 0.2.0  | >=0.2,<0.3             |
| langchain-google-community       | 2.0.0  | >=2,<3                 |
| langchain-google-genai           | 2.0.0  | >=2,<3                 |
| langchain-google-vertexai        | 2.0.0  | >=2,<3                 |
| langchain-huggingface            | 0.1.0  | >=0.1,<0.2             |
| langchain-ibm                    | 0.2.0  | >=0.2,<0.3             |
| langchain-milvus                 | 0.1.6  | >=0.1.6,<0.2           |
| langchain-mistralai              | 0.2.0  | >=0.2,<0.3             |
| langchain-mongodb                | 0.2.0  | >=0.2,<0.3             |
| langchain-nomic                  | 0.1.3  | >=0.1.3,<0.2           |
| langchain-ollama                 | 0.2.0  | >=0.2,<0.3             |
| langchain-openai                 | 0.2.0  | >=0.2,<0.3             |
| langchain-pinecone               | 0.2.0  | >=0.2,<0.3             |
| langchain-postgres               | 0.0.13 | >=0.0.13,<0.1          |
| langchain-prompty                | 0.1.0  | >=0.1,<0.2             |
| langchain-qdrant                 | 0.1.4  | >=0.1.4,<0.2           |
| langchain-redis                  | 0.1.0  | >=0.1,<0.2             |
| langchain-sema4                  | 0.2.0  | >=0.2,<0.3             |
| langchain-together               | 0.2.0  | >=0.2,<0.3             |
| langchain-unstructured           | 0.1.4  | >=0.1.4,<0.2           |
| langchain-upstage                | 0.3.0  | >=0.3,<0.4             |
| langchain-voyageai               | 0.2.0  | >=0.2,<0.3             |
| langchain-weaviate               | 0.0.3  | >=0.0.3,<0.1           |
Once you've updated to recent versions of the packages, you may need to address the following issues stemming from the internal switch from Pydantic v1 to Pydantic v2:

- If your code depends on Pydantic directly (aside from LangChain), you will need to upgrade your Pydantic version constraints to `pydantic>=2,<3`. See [Pydantic's migration guide](https://docs.pydantic.dev/latest/migration/) for help migrating your non-LangChain code from Pydantic v1 to v2.
- The internal switch from Pydantic v1 to v2 has a number of side effects on LangChain components. The most common cases are listed below, together with the recommended solutions.

## Common issues when transitioning to Pydantic 2

### 1. Do not use the `langchain_core.pydantic_v1` namespace

Replace any usage of `langchain_core.pydantic_v1` or `langchain.pydantic_v1` with direct imports from `pydantic`.

For example, change:

```python
from langchain_core.pydantic_v1 import BaseModel
```

to:

```python
from pydantic import BaseModel
```

This may require additional updates to your Pydantic code, given the number of breaking changes in Pydantic 2. See the [Pydantic Migration Guide](https://docs.pydantic.dev/latest/migration/) for how to upgrade your code from Pydantic 1 to 2.
### 2. Passing Pydantic objects to LangChain APIs

Users of the following APIs:

* `BaseChatModel.bind_tools`
* `BaseChatModel.with_structured_output`
* `Tool.from_function`
* `StructuredTool.from_function`

should ensure that they are passing Pydantic 2 objects to these APIs rather than Pydantic 1 objects (created via the `pydantic.v1` namespace of Pydantic 2), as illustrated in the sketch below the caution.

:::caution
While `v1` objects may be accepted by some of these APIs, users are advised to
use Pydantic 2 objects to avoid future issues.
:::

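A minimal sketch of passing a Pydantic 2 model to `with_structured_output`, assuming `langchain-openai` is installed and an OpenAI API key is configured (the model name is illustrative):

```python
from pydantic import BaseModel, Field  # Pydantic 2, not pydantic.v1
from langchain_openai import ChatOpenAI


class Person(BaseModel):
    """Information about a person."""

    name: str = Field(description="The person's name")
    age: int = Field(description="The person's age")


llm = ChatOpenAI(model="gpt-4o-mini")

# Pass the Pydantic 2 model class itself; the chat model returns a Person instance.
structured_llm = llm.with_structured_output(Person)
person = structured_llm.invoke("Anna is 29 years old.")
```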

### 3. Sub-classing LangChain models

Any code that sub-classes existing LangChain models (e.g., `BaseTool`, `BaseChatModel`, `LLM`) should be upgraded to use Pydantic 2 features.

For example, any user code that relies on Pydantic 1 features (e.g., `validator`) should be updated to the Pydantic 2 equivalent (e.g., `field_validator`), and any references to `pydantic.v1`, `langchain_core.pydantic_v1`, or `langchain.pydantic_v1` should be replaced with imports from `pydantic`.

```python
from pydantic.v1 import validator, Field  # if pydantic 2 is installed
# from pydantic import validator, Field  # if pydantic 1 is installed
# from langchain_core.pydantic_v1 import validator, Field
# from langchain.pydantic_v1 import validator, Field

from langchain_core.tools import BaseTool  # a Pydantic 1 model in LangChain 0.2 and earlier


class CustomTool(BaseTool):  # v1 code
    x: int = Field(default=1)

    def _run(self, *args, **kwargs):
        return "hello"

    @validator('x')  # v1 code
    @classmethod
    def validate_x(cls, x: int) -> int:
        return 1
```

Should change to:

```python
from pydantic import Field, field_validator  # pydantic v2
from langchain_core.tools import BaseTool  # a Pydantic 2 model as of LangChain 0.3


class CustomTool(BaseTool):
    x: int = Field(default=1)

    def _run(self, *args, **kwargs):
        return "hello"

    @field_validator('x')  # v2 code
    @classmethod
    def validate_x(cls, x: int) -> int:
        return 1


CustomTool(
    name='custom_tool',
    description="hello",
    x=1,
)
```

### 4. model_rebuild()

When sub-classing from LangChain models, users may need to add relevant imports to the file and rebuild the model. This typically surfaces as a Pydantic error saying the subclass "is not fully defined": the parent model's type annotations reference names (here `Optional`) that must be importable in your module before the subclass's schema can be resolved.

You can read more about `model_rebuild` [here](https://docs.pydantic.dev/latest/concepts/models/#rebuilding-model-schema).

Old code:

```python
from langchain_core.output_parsers import BaseOutputParser


class FooParser(BaseOutputParser):
    ...
```

New code:

```python
from typing import Optional as Optional

from langchain_core.output_parsers import BaseOutputParser


class FooParser(BaseOutputParser):
    ...


FooParser.model_rebuild()
```

## Migrate using langchain-cli

The `langchain-cli` can help update deprecated LangChain imports in your code automatically.

Please note that the `langchain-cli` only handles deprecated LangChain imports and cannot help upgrade your code from Pydantic 1 to Pydantic 2. For help with the Pydantic 1 to 2 migration itself, please refer to the [Pydantic Migration Guidelines](https://docs.pydantic.dev/latest/migration/).

As of 0.0.31, the `langchain-cli` relies on [gritql](https://about.grit.io/) for applying code mods.

### Installation

```bash
pip install -U langchain-cli
langchain-cli --version # <-- Make sure the version is at least 0.0.31
```

### Usage

Given that the migration script is not perfect, you should make sure you have a backup of your code first (e.g., using version control like `git`).

The `langchain-cli` will handle the `langchain_core.pydantic_v1` deprecation introduced in LangChain 0.3, as well as older deprecations (e.g., `from langchain.chat_models import ChatOpenAI`, which should now be `from langchain_openai import ChatOpenAI`).

You will need to run the migration script **twice**, as it only applies one import replacement per run.

For example, say your code still uses the old import `from langchain.chat_models import ChatOpenAI`:

- After the first run, you'll get: `from langchain_community.chat_models import ChatOpenAI`
- After the second run, you'll get: `from langchain_openai import ChatOpenAI`

```bash
# Run a first time
# Will replace from langchain.chat_models import ChatOpenAI
langchain-cli migrate --diff [path to code] # Preview
langchain-cli migrate [path to code] # Apply

# Run a second time to apply more import replacements
langchain-cli migrate --diff [path to code] # Preview
langchain-cli migrate [path to code] # Apply
```

### Other options

```bash
# See help menu
langchain-cli migrate --help
# Preview changes without applying
langchain-cli migrate --diff [path to code]
# Approve changes interactively
langchain-cli migrate --interactive [path to code]
```
@@ -330,7 +330,7 @@ const config = {
|
||||
// this is linked to erick@langchain.dev currently
|
||||
apiKey: "6c01842d6a88772ed2236b9c85806441",
|
||||
|
||||
indexName: "python-langchain-latest",
|
||||
indexName: "python-langchain-0.2",
|
||||
|
||||
contextualSearch: false,
|
||||
},
|
||||
|
||||
@@ -72,24 +72,25 @@ module.exports = {
|
||||
collapsed: false,
|
||||
collapsible: false,
|
||||
items: [
|
||||
{
|
||||
type: 'doc',
|
||||
id: 'versions/v0_3/index',
|
||||
label: "v0.3",
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "v0.2",
|
||||
items: [{
|
||||
type: 'autogenerated',
|
||||
dirName: 'versions/v0_2',
|
||||
}],
|
||||
},
|
||||
"versions/overview",
|
||||
"versions/release_policy",
|
||||
{
|
||||
type: 'doc',
|
||||
id: "how_to/pydantic_compatibility",
|
||||
label: "Pydantic compatibility",
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Migrating to v0.2",
|
||||
link: {type: 'doc', id: 'versions/v0_2/index'},
|
||||
collapsible: false,
|
||||
collapsed: false,
|
||||
items: [{
|
||||
type: 'autogenerated',
|
||||
dirName: 'versions/v0_2',
|
||||
className: 'hidden',
|
||||
}],
|
||||
},
|
||||
{
|
||||
type: "category",
|
||||
label: "Migrating from v0.0 chains",
|
||||
@@ -102,7 +103,6 @@ module.exports = {
|
||||
className: 'hidden',
|
||||
}],
|
||||
},
|
||||
"versions/release_policy",
|
||||
],
|
||||
},
|
||||
"security"
|
||||
|
||||
@@ -2538,389 +2538,5 @@ const suggestedLinks = {
|
||||
"alternative": [
|
||||
"/v0.1/docs/use_cases/web_scraping/"
|
||||
]
|
||||
},
|
||||
// below are new
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/": {"canonical": "/docs/how_to/#text-splitters", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter/": {"canonical": "/docs/how_to/character_text_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/character_text_splitter/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/code_splitter/": {"canonical": "/docs/how_to/code_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/code_splitter/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/HTML_header_metadata/": {"canonical": "/docs/how_to/HTML_header_metadata_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/HTML_header_metadata/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/HTML_section_aware_splitter/": {"canonical": "/docs/how_to/HTML_section_aware_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/HTML_section_aware_splitter/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/markdown_header_metadata/": {"canonical": "/docs/how_to/markdown_header_metadata_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/markdown_header_metadata/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/recursive_json_splitter/": {"canonical": "/docs/how_to/recursive_json_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/recursive_json_splitter/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter/": {"canonical": "/docs/how_to/recursive_text_splitter/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/recursive_text_splitter/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/semantic-chunker/": {"canonical": "/docs/how_to/semantic-chunker/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/semantic-chunker/"]},
|
||||
"/docs/modules/data_connection/document_transformers/text_splitters/split_by_token/": {"canonical": "/docs/how_to/split_by_token/", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/split_by_token/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/composition/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/example_selectors/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat/": {"canonical": "/docs/how_to/few_shot_examples_chat/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples_chat/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/few_shot_examples/": {"canonical": "/docs/how_to/few_shot_examples/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/partial/": {"canonical": "/docs/how_to/prompts_partial/", "alternative": ["/v0.1/docs/modules/model_io/prompts/partial/"]},
|
||||
"/docs/modules/model_io/prompts/prompt_templates/quick_start/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
|
||||
"/docs/modules/model_io/models/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
|
||||
"/docs/modules/model_io/models/chat/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/chat/"]},
|
||||
"/docs/modules/model_io/models/chat/chat_model_caching/": {"canonical": "/docs/how_to/chat_model_caching/", "alternative": ["/v0.1/docs/modules/model_io/chat/chat_model_caching/"]},
|
||||
"/docs/modules/model_io/models/chat/custom_chat_model/": {"canonical": "/docs/how_to/custom_chat_model/", "alternative": ["/v0.1/docs/modules/model_io/chat/custom_chat_model/"]},
|
||||
"/docs/modules/model_io/models/chat/function_calling/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/chat/function_calling/"]},
|
||||
"/docs/modules/model_io/models/chat/logprobs/": {"canonical": "/docs/how_to/logprobs/", "alternative": ["/v0.1/docs/modules/model_io/chat/logprobs/"]},
|
||||
"/docs/modules/model_io/models/chat/message_types/": {"canonical": "/docs/concepts/#messages", "alternative": ["/v0.1/docs/modules/model_io/chat/message_types/"]},
|
||||
"/docs/modules/model_io/models/chat/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/chat/quick_start/"]},
|
||||
"/docs/modules/model_io/models/chat/response_metadata/": {"canonical": "/docs/how_to/response_metadata/", "alternative": ["/v0.1/docs/modules/model_io/chat/response_metadata/"]},
|
||||
"/docs/modules/model_io/models/chat/streaming/": {"canonical": "/docs/how_to/streaming/", "alternative": ["/v0.1/docs/modules/model_io/chat/streaming/"]},
|
||||
"/docs/modules/model_io/models/chat/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
|
||||
"/docs/modules/model_io/models/chat/token_usage_tracking/": {"canonical": "/docs/how_to/chat_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/chat/token_usage_tracking/"]},
|
||||
"/docs/modules/model_io/models/concepts/": {"canonical": "/docs/concepts/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/concepts/"]},
|
||||
"/docs/modules/model_io/models/llms/": {"canonical": "/docs/concepts/#llms", "alternative": ["/v0.1/docs/modules/model_io/llms/"]},
|
||||
"/docs/modules/model_io/models/llms/custom_llm/": {"canonical": "/docs/how_to/custom_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/custom_llm/"]},
|
||||
"/docs/modules/model_io/models/llms/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/modules/model_io/llms/llm_caching/"]},
|
||||
"/docs/modules/model_io/models/llms/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/llms/quick_start/"]},
|
||||
"/docs/modules/model_io/models/llms/streaming_llm/": {"canonical": "/docs/how_to/streaming_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/streaming_llm/"]},
|
||||
"/docs/modules/model_io/models/llms/token_usage_tracking/": {"canonical": "/docs/how_to/llm_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/llms/token_usage_tracking/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/": {"canonical": "/docs/how_to/#output-parsers", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/custom/": {"canonical": "/docs/how_to/output_parser_custom/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/custom/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/quick_start/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/quick_start/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/csv/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/csv/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/datetime/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/datetime/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/enum/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/enum/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/json/": {"canonical": "/docs/how_to/output_parser_json/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/json/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/openai_functions/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_functions/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/openai_tools/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_tools/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/output_fixing/": {"canonical": "/docs/how_to/output_parser_fixing/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/output_fixing/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/pandas_dataframe/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pandas_dataframe/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/pydantic/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pydantic/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/retry/": {"canonical": "/docs/how_to/output_parser_retry/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/retry/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/structured/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/structured/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/xml/": {"canonical": "/docs/how_to/output_parser_xml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/xml/"]},
|
||||
"/docs/modules/model_io/models/output_parsers/types/yaml/": {"canonical": "/docs/how_to/output_parser_yaml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/yaml/"]},
|
||||
"/docs/modules/model_io/models/prompts/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/"]},
|
||||
"/docs/modules/model_io/models/prompts/composition/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
|
||||
"/docs/modules/model_io/models/prompts/example_selectors/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
|
||||
"/docs/modules/model_io/models/prompts/example_selectors/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
|
||||
"/docs/modules/model_io/models/prompts/example_selectors/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
|
||||
"/docs/modules/model_io/models/prompts/example_selectors/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
|
||||
"/docs/modules/model_io/models/prompts/example_selectors/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
|
||||
"/docs/modules/model_io/models/prompts/few_shot_examples_chat/": {"canonical": "/docs/how_to/few_shot_examples_chat/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples_chat/"]},
|
||||
"/docs/modules/model_io/models/prompts/few_shot_examples/": {"canonical": "/docs/how_to/few_shot_examples/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples/"]},
|
||||
"/docs/modules/model_io/models/prompts/partial/": {"canonical": "/docs/how_to/prompts_partial/", "alternative": ["/v0.1/docs/modules/model_io/prompts/partial/"]},
|
||||
"/docs/modules/model_io/models/prompts/quick_start/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
|
||||
"/docs/modules/model_io/models/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/quick_start/"]},
|
||||
"/docs/use_cases/more/graph/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/"]},
|
||||
"/docs/use_cases/more/graph/constructing/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/constructing/"]},
|
||||
"/docs/use_cases/more/graph/mapping/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/mapping/"]},
|
||||
"/docs/use_cases/more/graph/prompting/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/prompting/"]},
|
||||
"/docs/use_cases/more/graph/quickstart/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/quickstart/"]},
|
||||
"/docs/use_cases/more/graph/semantic/": {"canonical": "/docs/tutorials/graph/", "alternative": ["/v0.1/docs/use_cases/graph/semantic/"]},
|
||||
"/docs/modules/model_io/chat/how_to/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/chat/"]},
|
||||
"/docs/modules/model_io/chat/how_to/chat_model_caching/": {"canonical": "/docs/how_to/chat_model_caching/", "alternative": ["/v0.1/docs/modules/model_io/chat/chat_model_caching/"]},
|
||||
"/docs/modules/model_io/chat/how_to/custom_chat_model/": {"canonical": "/docs/how_to/custom_chat_model/", "alternative": ["/v0.1/docs/modules/model_io/chat/custom_chat_model/"]},
|
||||
"/docs/modules/model_io/chat/how_to/function_calling/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/chat/function_calling/"]},
|
||||
"/docs/modules/model_io/chat/how_to/logprobs/": {"canonical": "/docs/how_to/logprobs/", "alternative": ["/v0.1/docs/modules/model_io/chat/logprobs/"]},
|
||||
"/docs/modules/model_io/chat/how_to/message_types/": {"canonical": "/docs/concepts/#messages", "alternative": ["/v0.1/docs/modules/model_io/chat/message_types/"]},
|
||||
"/docs/modules/model_io/chat/how_to/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/chat/quick_start/"]},
|
||||
"/docs/modules/model_io/chat/how_to/response_metadata/": {"canonical": "/docs/how_to/response_metadata/", "alternative": ["/v0.1/docs/modules/model_io/chat/response_metadata/"]},
|
||||
"/docs/modules/model_io/chat/how_to/streaming/": {"canonical": "/docs/how_to/streaming/", "alternative": ["/v0.1/docs/modules/model_io/chat/streaming/"]},
|
||||
"/docs/modules/model_io/chat/how_to/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
|
||||
"/docs/modules/model_io/chat/how_to/token_usage_tracking/": {"canonical": "/docs/how_to/chat_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/chat/token_usage_tracking/"]},
|
||||
"/docs/modules/model_io/llms/how_to/": {"canonical": "/docs/concepts/#llms", "alternative": ["/v0.1/docs/modules/model_io/llms/"]},
|
||||
"/docs/modules/model_io/llms/how_to/custom_llm/": {"canonical": "/docs/how_to/custom_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/custom_llm/"]},
|
||||
"/docs/modules/model_io/llms/how_to/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/modules/model_io/llms/llm_caching/"]},
|
||||
"/docs/modules/model_io/llms/how_to/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/llms/quick_start/"]},
|
||||
"/docs/modules/model_io/llms/how_to/streaming_llm/": {"canonical": "/docs/how_to/streaming_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/streaming_llm/"]},
|
||||
"/docs/modules/model_io/llms/how_to/token_usage_tracking/": {"canonical": "/docs/how_to/llm_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/llms/token_usage_tracking/"]},
|
||||
"/docs/modules/model_io/llms/integrations/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/integrations/llms/llm_caching/"]},
|
||||
"/docs/modules/model_io/chat/integrations/ollama_functions/": {"canonical": "/docs/integrations/chat/ollama/", "alternative": ["/v0.1/docs/integrations/chat/ollama_functions/"]},
|
||||
"/en/latest/modules/models/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
|
||||
"/en/latest/modules/models/chat/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/chat/"]},
|
||||
"/en/latest/modules/models/chat/chat_model_caching/": {"canonical": "/docs/how_to/chat_model_caching/", "alternative": ["/v0.1/docs/modules/model_io/chat/chat_model_caching/"]},
|
||||
"/en/latest/modules/models/chat/custom_chat_model/": {"canonical": "/docs/how_to/custom_chat_model/", "alternative": ["/v0.1/docs/modules/model_io/chat/custom_chat_model/"]},
|
||||
"/en/latest/modules/models/chat/function_calling/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/chat/function_calling/"]},
|
||||
"/en/latest/modules/models/chat/logprobs/": {"canonical": "/docs/how_to/logprobs/", "alternative": ["/v0.1/docs/modules/model_io/chat/logprobs/"]},
|
||||
"/en/latest/modules/models/chat/message_types/": {"canonical": "/docs/concepts/#messages", "alternative": ["/v0.1/docs/modules/model_io/chat/message_types/"]},
|
||||
"/en/latest/modules/models/chat/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/chat/quick_start/"]},
|
||||
"/en/latest/modules/models/chat/response_metadata/": {"canonical": "/docs/how_to/response_metadata/", "alternative": ["/v0.1/docs/modules/model_io/chat/response_metadata/"]},
|
||||
"/en/latest/modules/models/chat/streaming/": {"canonical": "/docs/how_to/streaming/", "alternative": ["/v0.1/docs/modules/model_io/chat/streaming/"]},
|
||||
"/en/latest/modules/models/chat/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
|
||||
"/en/latest/modules/models/chat/token_usage_tracking/": {"canonical": "/docs/how_to/chat_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/chat/token_usage_tracking/"]},
|
||||
"/en/latest/modules/models/concepts/": {"canonical": "/docs/concepts/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/concepts/"]},
|
||||
"/en/latest/modules/models/llms/": {"canonical": "/docs/concepts/#llms", "alternative": ["/v0.1/docs/modules/model_io/llms/"]},
|
||||
"/en/latest/modules/models/llms/custom_llm/": {"canonical": "/docs/how_to/custom_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/custom_llm/"]},
|
||||
"/en/latest/modules/models/llms/llm_caching/": {"canonical": "/docs/how_to/llm_caching/", "alternative": ["/v0.1/docs/modules/model_io/llms/llm_caching/"]},
|
||||
"/en/latest/modules/models/llms/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/llms/quick_start/"]},
|
||||
"/en/latest/modules/models/llms/streaming_llm/": {"canonical": "/docs/how_to/streaming_llm/", "alternative": ["/v0.1/docs/modules/model_io/llms/streaming_llm/"]},
|
||||
"/en/latest/modules/models/llms/token_usage_tracking/": {"canonical": "/docs/how_to/llm_token_usage_tracking/", "alternative": ["/v0.1/docs/modules/model_io/llms/token_usage_tracking/"]},
|
||||
"/en/latest/modules/models/output_parsers/": {"canonical": "/docs/how_to/#output-parsers", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/"]},
|
||||
"/en/latest/modules/models/output_parsers/custom/": {"canonical": "/docs/how_to/output_parser_custom/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/custom/"]},
|
||||
"/en/latest/modules/models/output_parsers/quick_start/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/quick_start/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/csv/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/csv/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/datetime/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/datetime/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/enum/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/enum/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/json/": {"canonical": "/docs/how_to/output_parser_json/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/json/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/openai_functions/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_functions/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/openai_tools/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/openai_tools/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/output_fixing/": {"canonical": "/docs/how_to/output_parser_fixing/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/output_fixing/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/pandas_dataframe/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pandas_dataframe/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/pydantic/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pydantic/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/retry/": {"canonical": "/docs/how_to/output_parser_retry/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/retry/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/structured/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/structured/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/xml/": {"canonical": "/docs/how_to/output_parser_xml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/xml/"]},
|
||||
"/en/latest/modules/models/output_parsers/types/yaml/": {"canonical": "/docs/how_to/output_parser_yaml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/yaml/"]},
|
||||
"/en/latest/modules/models/prompts/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/"]},
|
||||
"/en/latest/modules/models/prompts/composition/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
|
||||
"/en/latest/modules/models/prompts/example_selectors/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
|
||||
"/en/latest/modules/models/prompts/example_selectors/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
|
||||
"/en/latest/modules/models/prompts/example_selectors/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
|
||||
"/en/latest/modules/models/prompts/example_selectors/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
|
||||
"/en/latest/modules/models/prompts/example_selectors/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
|
||||
"/en/latest/modules/models/prompts/few_shot_examples_chat/": {"canonical": "/docs/how_to/few_shot_examples_chat/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples_chat/"]},
|
||||
"/en/latest/modules/models/prompts/few_shot_examples/": {"canonical": "/docs/how_to/few_shot_examples/", "alternative": ["/v0.1/docs/modules/model_io/prompts/few_shot_examples/"]},
|
||||
"/en/latest/modules/models/prompts/partial/": {"canonical": "/docs/how_to/prompts_partial/", "alternative": ["/v0.1/docs/modules/model_io/prompts/partial/"]},
|
||||
"/en/latest/modules/models/prompts/quick_start/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
|
||||
"/en/latest/modules/models/quick_start/": {"canonical": "/docs/tutorials/llm_chain/", "alternative": ["/v0.1/docs/modules/model_io/quick_start/"]},
|
||||
"/docs/modules/model_io/prompts/example_selector_types/": {"canonical": "/docs/how_to/example_selectors/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/"]},
|
||||
"/docs/modules/model_io/prompts/example_selector_types/length_based/": {"canonical": "/docs/how_to/example_selectors_length_based/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/length_based/"]},
|
||||
"/docs/modules/model_io/prompts/example_selector_types/mmr/": {"canonical": "/docs/how_to/example_selectors_mmr/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/mmr/"]},
|
||||
"/docs/modules/model_io/prompts/example_selector_types/ngram_overlap/": {"canonical": "/docs/how_to/example_selectors_ngram/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/ngram_overlap/"]},
|
||||
"/docs/modules/model_io/prompts/example_selector_types/similarity/": {"canonical": "/docs/how_to/example_selectors_similarity/", "alternative": ["/v0.1/docs/modules/model_io/prompts/example_selectors/similarity/"]},
|
||||
"/docs/modules/agents/tools/": {"canonical": "/docs/how_to/#tools", "alternative": ["/v0.1/docs/modules/tools/"]},
|
||||
"/docs/modules/agents/tools/custom_tools/": {"canonical": "/docs/how_to/custom_tools/", "alternative": ["/v0.1/docs/modules/tools/custom_tools/"]},
|
||||
"/docs/modules/agents/tools/toolkits/": {"canonical": "/docs/how_to/#tools", "alternative": ["/v0.1/docs/modules/tools/toolkits/"]},
|
||||
"/docs/modules/agents/tools/tools_as_openai_functions/": {"canonical": "/docs/how_to/tool_calling/", "alternative": ["/v0.1/docs/modules/tools/tools_as_openai_functions/"]},
|
||||
"/docs/guides/deployments/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/deployments/"]},
|
||||
"/docs/guides/deployments/template_repos/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/deployments/template_repos/"]},
|
||||
"/docs/guides/evaluation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/"]},
|
||||
"/docs/guides/evaluation/comparison/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/"]},
|
||||
"/docs/guides/evaluation/comparison/custom/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/custom/"]},
|
||||
"/docs/guides/evaluation/comparison/pairwise_embedding_distance/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/pairwise_embedding_distance/"]},
|
||||
"/docs/guides/evaluation/comparison/pairwise_string/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/comparison/pairwise_string/"]},
|
||||
"/docs/guides/evaluation/examples/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/examples/"]},
|
||||
"/docs/guides/evaluation/examples/comparisons/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/examples/comparisons/"]},
|
||||
"/docs/guides/evaluation/string/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/"]},
|
||||
"/docs/guides/evaluation/string/criteria_eval_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/criteria_eval_chain/"]},
|
||||
"/docs/guides/evaluation/string/custom/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/custom/"]},
|
||||
"/docs/guides/evaluation/string/embedding_distance/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/embedding_distance/"]},
|
||||
"/docs/guides/evaluation/string/exact_match/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/exact_match/"]},
|
||||
"/docs/guides/evaluation/string/json/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/json/"]},
|
||||
"/docs/guides/evaluation/string/regex_match/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/regex_match/"]},
|
||||
"/docs/guides/evaluation/string/scoring_eval_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/scoring_eval_chain/"]},
|
||||
"/docs/guides/evaluation/string/string_distance/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/string/string_distance/"]},
|
||||
"/docs/guides/evaluation/trajectory/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/trajectory/"]},
|
||||
"/docs/guides/evaluation/trajectory/custom/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/trajectory/custom/"]},
|
||||
"/docs/guides/evaluation/trajectory/trajectory_eval/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/trajectory/trajectory_eval/"]},
|
||||
"/docs/guides/privacy/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/"]},
|
||||
"/docs/guides/privacy/amazon_comprehend_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/amazon_comprehend_chain/"]},
|
||||
"/docs/guides/privacy/constitutional_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/constitutional_chain/"]},
|
||||
"/docs/guides/privacy/hugging_face_prompt_injection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/hugging_face_prompt_injection/"]},
|
||||
"/docs/guides/privacy/layerup_security/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/layerup_security/"]},
|
||||
"/docs/guides/privacy/logical_fallacy_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/logical_fallacy_chain/"]},
|
||||
"/docs/guides/privacy/moderation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/moderation/"]},
|
||||
"/docs/guides/privacy/presidio_data_anonymization/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/"]},
|
||||
"/docs/guides/privacy/presidio_data_anonymization/multi_language/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/"]},
|
||||
"/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"]},
|
||||
"/docs/guides/privacy/presidio_data_anonymization/reversible/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"]},
|
||||
"/docs/guides/safety/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/"]},
|
||||
"/docs/guides/safety/amazon_comprehend_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/amazon_comprehend_chain/"]},
|
||||
"/docs/guides/safety/constitutional_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/constitutional_chain/"]},
|
||||
"/docs/guides/safety/hugging_face_prompt_injection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/hugging_face_prompt_injection/"]},
|
||||
"/docs/guides/safety/layerup_security/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/layerup_security/"]},
|
||||
"/docs/guides/safety/logical_fallacy_chain/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/logical_fallacy_chain/"]},
|
||||
"/docs/guides/safety/moderation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/moderation/"]},
|
||||
"/docs/guides/safety/presidio_data_anonymization/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/"]},
|
||||
"/docs/guides/safety/presidio_data_anonymization/multi_language/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/multi_language/"]},
|
||||
"/docs/guides/safety/presidio_data_anonymization/qa_privacy_protection/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/qa_privacy_protection/"]},
|
||||
"/docs/guides/safety/presidio_data_anonymization/reversible/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/presidio_data_anonymization/reversible/"]},
|
||||
"/docs/integrations/llms/titan_takeoff_pro/": {"canonical": "/docs/integrations/llms/titan_takeoff/"},
|
||||
"/docs/integrations/providers/optimum_intel/": {"canonical": "/docs/integrations/providers/intel/"},
|
||||
"/docs/use_cases/graph/integrations/diffbot_graphtransformer/": {"canonical": "/docs/integrations/graphs/diffbot/"},
|
||||
"/docs/use_cases/graph/integrations/graph_arangodb_qa/": {"canonical": "/docs/integrations/graphs/arangodb/"},
|
||||
"/docs/use_cases/graph/integrations/graph_cypher_qa/": {"canonical": "/docs/integrations/graphs/neo4j_cypher/"},
|
||||
"/docs/use_cases/graph/integrations/graph_falkordb_qa/": {"canonical": "/docs/integrations/graphs/falkordb/"},
|
||||
"/docs/use_cases/graph/integrations/graph_gremlin_cosmosdb_qa/": {"canonical": "/docs/integrations/graphs/azure_cosmosdb_gremlin/"},
|
||||
"/docs/use_cases/graph/integrations/graph_hugegraph_qa/": {"canonical": "/docs/integrations/graphs/hugegraph/"},
|
||||
"/docs/use_cases/graph/integrations/graph_kuzu_qa/": {"canonical": "/docs/integrations/graphs/kuzu_db/"},
|
||||
"/docs/use_cases/graph/integrations/graph_memgraph_qa/": {"canonical": "/docs/integrations/graphs/memgraph/"},
|
||||
"/docs/use_cases/graph/integrations/graph_nebula_qa/": {"canonical": "/docs/integrations/graphs/nebula_graph/"},
|
||||
"/docs/use_cases/graph/integrations/graph_networkx_qa/": {"canonical": "/docs/integrations/graphs/networkx/"},
|
||||
"/docs/use_cases/graph/integrations/graph_ontotext_graphdb_qa/": {"canonical": "/docs/integrations/graphs/ontotext/"},
|
||||
"/docs/use_cases/graph/integrations/graph_sparql_qa/": {"canonical": "/docs/integrations/graphs/rdflib_sparql/"},
|
||||
"/docs/use_cases/graph/integrations/neptune_cypher_qa/": {"canonical": "/docs/integrations/graphs/amazon_neptune_open_cypher/"},
|
||||
"/docs/use_cases/graph/integrations/neptune_sparql_qa/": {"canonical": "/docs/integrations/graphs/amazon_neptune_sparql/"},
|
||||
"/docs/integrations/providers/facebook_chat/": {"canonical": "/docs/integrations/providers/facebook/"},
|
||||
"/docs/integrations/providers/facebook_faiss/": {"canonical": "/docs/integrations/providers/facebook/"},
|
||||
"/docs/integrations/memory/google_cloud_sql_mssql/": {"canonical": "/docs/integrations/memory/google_sql_mssql/"},
|
||||
"/docs/integrations/memory/google_cloud_sql_mysql/": {"canonical": "/docs/integrations/memory/google_sql_mysql/"},
|
||||
"/docs/integrations/memory/google_cloud_sql_pg/": {"canonical": "/docs/integrations/memory/google_sql_pg/"},
|
||||
"/docs/integrations/memory/google_datastore/": {"canonical": "/docs/integrations/memory/google_firestore_datastore/"},
|
||||
"/docs/integrations/llms/huggingface_textgen_inference/": {"canonical": "/docs/integrations/llms/huggingface_endpoint/"},
|
||||
"/docs/integrations/llms/huggingface_hub/": {"canonical": "/docs/integrations/llms/huggingface_endpoint/"},
|
||||
"/docs/integrations/llms/bigdl/": {"canonical": "/docs/integrations/llms/ipex_llm/"},
|
||||
"/docs/integrations/llms/watsonxllm/": {"canonical": "/docs/integrations/llms/ibm_watsonx/"},
|
||||
"/docs/integrations/llms/pai_eas_endpoint/": {"canonical": "/docs/integrations/llms/alibabacloud_pai_eas_endpoint/"},
|
||||
"/docs/integrations/vectorstores/hanavector/": {"canonical": "/docs/integrations/vectorstores/sap_hanavector/"},
|
||||
"/docs/use_cases/qa_structured/sql/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
|
||||
"/docs/contributing/packages/": {"canonical": "/docs/versions/release_policy/", "alternative": ["/v0.1/docs/packages/"]},
|
||||
"/docs/community/": {"canonical": "/docs/contributing/"},
|
||||
"/docs/modules/chains/(.+)/": {"canonical": "/docs/versions/migrating_chains/", "alternative": ["/v0.1/docs/modules/chains/"]},
|
||||
"/docs/modules/agents/how_to/custom_llm_agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
|
||||
"/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
|
||||
"/docs/modules/agents/how_to/custom_llm_chat_agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
|
||||
"/docs/modules/agents/how_to/custom_mrkl_agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/custom_agent/"]},
|
||||
"/docs/modules/agents/how_to/streaming_stdout_final_only/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/how_to/streaming/"]},
|
||||
"/docs/modules/model_io/prompts/prompts_pipelining/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
|
||||
"/docs/modules/model_io/output_parsers/enum/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/enum/"]},
|
||||
"/docs/modules/model_io/output_parsers/pandas_dataframe/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/pandas_dataframe/"]},
|
||||
"/docs/modules/model_io/output_parsers/structured/": {"canonical": "/docs/how_to/output_parser_structured/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/structured/"]},
|
||||
"/docs/modules/model_io/output_parsers/xml/": {"canonical": "/docs/how_to/output_parser_xml/", "alternative": ["/v0.1/docs/modules/model_io/output_parsers/types/xml/"]},
|
||||
"/docs/use_cases/question_answering/code_understanding/": {"canonical": "https://langchain-ai.github.io/langgraph/tutorials/code_assistant/langgraph_code_assistant/", "alternative": ["/v0.1/docs/use_cases/code_understanding/"]},
|
||||
"/docs/use_cases/question_answering/document-context-aware-QA/": {"canonical": "/docs/how_to/#text-splitters", "alternative": ["/v0.1/docs/modules/data_connection/document_transformers/"]},
|
||||
"/docs/integrations/providers/alibabacloud_opensearch/": {"canonical": "/docs/integrations/providers/alibaba_cloud/"},
|
||||
"/docs/integrations/chat/pai_eas_chat_endpoint/": {"canonical": "/docs/integrations/chat/alibaba_cloud_pai_eas/"},
|
||||
"/docs/integrations/providers/tencentvectordb/": {"canonical": "/docs/integrations/providers/tencent/"},
|
||||
"/docs/integrations/chat/hunyuan/": {"canonical": "/docs/integrations/chat/tencent_hunyuan/"},
|
||||
"/docs/integrations/document_loaders/excel/": {"canonical": "/docs/integrations/document_loaders/microsoft_excel/"},
|
||||
"/docs/integrations/document_loaders/onenote/": {"canonical": "/docs/integrations/document_loaders/microsoft_onenote/"},
|
||||
"/docs/integrations/providers/aws_dynamodb/": {"canonical": "/docs/integrations/platforms/aws/"},
|
||||
"/docs/integrations/providers/scann/": {"canonical": "/docs/integrations/platforms/google/"},
|
||||
"/docs/integrations/toolkits/google_drive/": {"canonical": "/docs/integrations/tools/google_drive/"},
|
||||
"/docs/use_cases/question_answering/chat_vector_db/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/use_cases/question_answering/in_memory_question_answering/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/use_cases/question_answering/multi_retrieval_qa_router/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/use_cases/question_answering/multiple_retrieval/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/use_cases/question_answering/vector_db_qa/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/use_cases/question_answering/vector_db_text_generation/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/guides/langsmith/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/langsmith/"]},
|
||||
"/docs/guides/langsmith/walkthrough/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/langsmith/walkthrough/"]},
|
||||
"/docs/use_cases/qa_structured/integrations/sqlite/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
|
||||
"/docs/use_cases/more/data_generation/": {"canonical": "/docs/tutorials/data_generation/", "alternative": ["/v0.1/docs/use_cases/data_generation/"]},
|
||||
"/docs/use_cases/question_answering/how_to/chat_vector_db/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/use_cases/question_answering/how_to/conversational_retrieval_agents/": {"canonical": "/docs/tutorials/qa_chat_history/", "alternative": ["/v0.1/docs/use_cases/question_answering/conversational_retrieval_agents/"]},
|
||||
"/docs/use_cases/question_answering/question_answering/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/use_cases/question_answering/how_to/local_retrieval_qa/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/local_retrieval_qa/"]},
|
||||
"/docs/use_cases/question_answering/how_to/question_answering/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/modules/agents/agents/examples/mrkl_chat(.html?)/": {"canonical": "/docs/how_to/#agents", "alternative": ["/v0.1/docs/modules/agents/"]},
|
||||
"/docs/integrations/": {"canonical": "/docs/integrations/providers/"},
|
||||
"/docs/expression_language/cookbook/routing/": {"canonical": "/docs/how_to/routing/", "alternative": ["/v0.1/docs/expression_language/how_to/routing/"]},
|
||||
"/docs/guides/expression_language/": {"canonical": "/docs/how_to/#langchain-expression-language-lcel", "alternative": ["/v0.1/docs/expression_language/"]},
|
||||
"/docs/integrations/providers/amazon_api_gateway/": {"canonical": "/docs/integrations/platforms/aws/"},
|
||||
"/docs/integrations/providers/huggingface/": {"canonical": "/docs/integrations/platforms/huggingface/"},
|
||||
"/docs/integrations/providers/azure_blob_storage/": {"canonical": "/docs/integrations/platforms/microsoft/"},
|
||||
"/docs/integrations/providers/google_vertexai_matchingengine/": {"canonical": "/docs/integrations/platforms/google/"},
|
||||
"/docs/integrations/providers/aws_s3/": {"canonical": "/docs/integrations/platforms/aws/"},
|
||||
"/docs/integrations/providers/azure_openai/": {"canonical": "/docs/integrations/platforms/microsoft/"},
|
||||
"/docs/integrations/providers/azure_cognitive_search_/": {"canonical": "/docs/integrations/platforms/microsoft/"},
|
||||
"/docs/integrations/providers/bedrock/": {"canonical": "/docs/integrations/platforms/aws/"},
|
||||
"/docs/integrations/providers/google_bigquery/": {"canonical": "/docs/integrations/platforms/google/"},
|
||||
"/docs/integrations/providers/google_cloud_storage/": {"canonical": "/docs/integrations/platforms/google/"},
|
||||
"/docs/integrations/providers/google_drive/": {"canonical": "/docs/integrations/platforms/google/"},
|
||||
"/docs/integrations/providers/google_search/": {"canonical": "/docs/integrations/platforms/google/"},
|
||||
"/docs/integrations/providers/microsoft_onedrive/": {"canonical": "/docs/integrations/platforms/microsoft/"},
|
||||
"/docs/integrations/providers/microsoft_powerpoint/": {"canonical": "/docs/integrations/platforms/microsoft/"},
|
||||
"/docs/integrations/providers/microsoft_word/": {"canonical": "/docs/integrations/platforms/microsoft/"},
|
||||
"/docs/integrations/providers/sagemaker_endpoint/": {"canonical": "/docs/integrations/platforms/aws/"},
|
||||
"/docs/integrations/providers/sagemaker_tracking/": {"canonical": "/docs/integrations/callbacks/sagemaker_tracking/"},
|
||||
"/docs/integrations/providers/openai/": {"canonical": "/docs/integrations/platforms/openai/"},
|
||||
"/docs/integrations/cassandra/": {"canonical": "/docs/integrations/providers/cassandra/"},
|
||||
"/docs/integrations/providers/providers/semadb/": {"canonical": "/docs/integrations/providers/semadb/"},
|
||||
"/docs/integrations/vectorstores/vectorstores/semadb/": {"canonical": "/docs/integrations/vectorstores/semadb/"},
|
||||
"/docs/integrations/vectorstores/async_faiss/": {"canonical": "/docs/integrations/vectorstores/faiss_async/"},
|
||||
"/docs/integrations/vectorstores/matchingengine/": {"canonical": "/docs/integrations/vectorstores/google_vertex_ai_vector_search/"},
|
||||
"/docs/integrations/tools/sqlite/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
|
||||
"/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader/": {"canonical": "/docs/integrations/document_loaders/amazon_textract/"},
|
||||
"/docs/integrations/document_loaders/Etherscan/": {"canonical": "/docs/integrations/document_loaders/etherscan/"},
|
||||
"/docs/integrations/document_loaders/merge_doc_loader/": {"canonical": "/docs/integrations/document_loaders/merge_doc/"},
|
||||
"/docs/integrations/document_loaders/recursive_url_loader/": {"canonical": "/docs/integrations/document_loaders/recursive_url/"},
|
||||
"/docs/integrations/providers/google_document_ai/": {"canonical": "/docs/integrations/platforms/google/"},
|
||||
"/docs/integrations/memory/motorhead_memory_managed/": {"canonical": "/docs/integrations/memory/motorhead_memory/"},
|
||||
"/docs/integrations/memory/dynamodb_chat_message_history/": {"canonical": "/docs/integrations/memory/aws_dynamodb/"},
|
||||
"/docs/integrations/memory/entity_memory_with_sqlite/": {"canonical": "/docs/integrations/memory/sqlite/"},
|
||||
"/docs/modules/model_io/chat/integrations/anthropic/": {"canonical": "/docs/integrations/chat/anthropic/"},
|
||||
"/docs/modules/model_io/chat/integrations/azure_chat_openai/": {"canonical": "/docs/integrations/chat/azure_chat_openai/"},
|
||||
"/docs/modules/model_io/chat/integrations/google_vertex_ai_palm/": {"canonical": "/docs/integrations/chat/google_vertex_ai_palm/"},
|
||||
"/docs/modules/model_io/chat/integrations/openai/": {"canonical": "/docs/integrations/chat/openai/"},
|
||||
"/docs/modules/model_io/chat/integrations/promptlayer_chatopenai/": {"canonical": "/docs/integrations/chat/promptlayer_chatopenai/"},
|
||||
"/docs/modules/model_io/llms/integrations/ai21/": {"canonical": "/docs/integrations/llms/ai21/"},
|
||||
"/docs/modules/model_io/llms/integrations/aleph_alpha/": {"canonical": "/docs/integrations/llms/aleph_alpha/"},
|
||||
"/docs/modules/model_io/llms/integrations/anyscale/": {"canonical": "/docs/integrations/llms/anyscale/"},
|
||||
"/docs/modules/model_io/llms/integrations/banana/": {"canonical": "/docs/integrations/llms/banana/"},
|
||||
"/docs/modules/model_io/llms/integrations/baseten/": {"canonical": "/docs/integrations/llms/baseten/"},
|
||||
"/docs/modules/model_io/llms/integrations/beam/": {"canonical": "/docs/integrations/llms/beam/"},
|
||||
"/docs/modules/model_io/llms/integrations/bedrock/": {"canonical": "/docs/integrations/llms/bedrock/"},
|
||||
"/docs/modules/model_io/llms/integrations/cohere/": {"canonical": "/docs/integrations/llms/cohere/"},
|
||||
"/docs/modules/model_io/llms/integrations/ctransformers/": {"canonical": "/docs/integrations/llms/ctransformers/"},
|
||||
"/docs/modules/model_io/llms/integrations/databricks/": {"canonical": "/docs/integrations/llms/databricks/"},
|
||||
"/docs/modules/model_io/llms/integrations/google_vertex_ai_palm/": {"canonical": "/docs/integrations/llms/google_vertex_ai_palm/"},
|
||||
"/docs/modules/model_io/llms/integrations/huggingface_pipelines/": {"canonical": "/docs/integrations/llms/huggingface_pipelines/"},
|
||||
"/docs/modules/model_io/llms/integrations/jsonformer_experimental/": {"canonical": "/docs/integrations/llms/jsonformer_experimental/"},
|
||||
"/docs/modules/model_io/llms/integrations/llamacpp/": {"canonical": "/docs/integrations/llms/llamacpp/"},
|
||||
"/docs/modules/model_io/llms/integrations/manifest/": {"canonical": "/docs/integrations/llms/manifest/"},
|
||||
"/docs/modules/model_io/llms/integrations/modal/": {"canonical": "/docs/integrations/llms/modal/"},
|
||||
"/docs/modules/model_io/llms/integrations/mosaicml/": {"canonical": "/docs/integrations/llms/mosaicml/"},
|
||||
"/docs/modules/model_io/llms/integrations/nlpcloud/": {"canonical": "/docs/integrations/llms/nlpcloud/"},
|
||||
"/docs/modules/model_io/llms/integrations/openai/": {"canonical": "/docs/integrations/llms/openai/"},
|
||||
"/docs/modules/model_io/llms/integrations/openlm/": {"canonical": "/docs/integrations/llms/openlm/"},
|
||||
"/docs/modules/model_io/llms/integrations/predictionguard/": {"canonical": "/docs/integrations/llms/predictionguard/"},
|
||||
"/docs/modules/model_io/llms/integrations/promptlayer_openai/": {"canonical": "/docs/integrations/llms/promptlayer_openai/"},
|
||||
"/docs/modules/model_io/llms/integrations/rellm_experimental/": {"canonical": "/docs/integrations/llms/rellm_experimental/"},
|
||||
"/docs/modules/model_io/llms/integrations/replicate/": {"canonical": "/docs/integrations/llms/replicate/"},
|
||||
"/docs/modules/model_io/llms/integrations/runhouse/": {"canonical": "/docs/integrations/llms/runhouse/"},
|
||||
"/docs/modules/model_io/llms/integrations/sagemaker/": {"canonical": "/docs/integrations/llms/sagemaker/"},
|
||||
"/docs/modules/model_io/llms/integrations/stochasticai/": {"canonical": "/docs/integrations/llms/stochasticai/"},
|
||||
"/docs/modules/model_io/llms/integrations/writer/": {"canonical": "/docs/integrations/llms/writer/"},
|
||||
"/en/latest/use_cases/apis.html/": {"canonical": null, "alternative": ["/v0.1/docs/use_cases/apis/"]},
|
||||
"/en/latest/use_cases/extraction.html/": {"canonical": "/docs/tutorials/extraction/", "alternative": ["/v0.1/docs/use_cases/extraction/"]},
|
||||
"/en/latest/use_cases/summarization.html/": {"canonical": "/docs/tutorials/summarization/", "alternative": ["/v0.1/docs/use_cases/summarization/"]},
|
||||
"/en/latest/use_cases/tabular.html/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/"]},
|
||||
"/en/latest/youtube.html/": {"canonical": "/docs/additional_resources/youtube/"},
|
||||
"/docs/": {"canonical": "/"},
|
||||
"/en/latest/": {"canonical": "/"},
|
||||
"/en/latest/index.html/": {"canonical": "/"},
|
||||
"/en/latest/modules/models.html/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
|
||||
"/docs/integrations/retrievers/google_cloud_enterprise_search/": {"canonical": "/docs/integrations/retrievers/google_vertex_ai_search/"},
|
||||
"/docs/integrations/tools/metaphor_search/": {"canonical": "/docs/integrations/tools/exa_search/"},
|
||||
"/docs/expression_language/how_to/fallbacks/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/fallbacks/"]},
|
||||
"/docs/expression_language/cookbook/retrieval/": {"canonical": "/docs/tutorials/rag/", "alternative": ["/v0.1/docs/use_cases/question_answering/"]},
|
||||
"/docs/expression_language/cookbook/agent/": {"canonical": "/docs/how_to/migrate_agent/", "alternative": ["/v0.1/docs/modules/agents/agent_types/xml_agent/"]},
|
||||
"/docs/modules/model_io/prompts/message_prompts/": {"canonical": "/docs/how_to/#prompt-templates", "alternative": ["/v0.1/docs/modules/model_io/prompts/quick_start/"]},
|
||||
"/docs/modules/model_io/prompts/pipeline/": {"canonical": "/docs/how_to/prompts_composition/", "alternative": ["/v0.1/docs/modules/model_io/prompts/composition/"]},
|
||||
"/docs/expression_language/cookbook/memory/": {"canonical": "/docs/how_to/chatbots_memory/", "alternative": ["/v0.1/docs/modules/memory/"]},
|
||||
"/docs/expression_language/cookbook/tools/": {"canonical": "/docs/tutorials/agents/", "alternative": ["/v0.1/docs/use_cases/tool_use/quickstart/"]},
|
||||
"/docs/expression_language/cookbook/sql_db/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/quickstart/"]},
|
||||
"/docs/expression_language/cookbook/moderation/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/safety/moderation/"]},
|
||||
"/docs/expression_language/cookbook/embedding_router/": {"canonical": "/docs/how_to/routing/", "alternative": ["/v0.1/docs/expression_language/how_to/routing/"]},
|
||||
"/docs/guides/structured_output/": {"canonical": "/docs/how_to/structured_output/", "alternative": ["/v0.1/docs/modules/model_io/chat/structured_output/"]},
|
||||
"/docs/modules/agents/how_to/structured_tools/": {"canonical": "/docs/how_to/#tools", "alternative": ["/v0.1/docs/modules/tools/"]},
|
||||
"/docs/use_cases/csv/": {"canonical": "/docs/tutorials/sql_qa/", "alternative": ["/v0.1/docs/use_cases/sql/csv/"]},
|
||||
"/docs/guides/debugging/": {"canonical": "/docs/how_to/debugging/", "alternative": ["/v0.1/docs/guides/development/debugging/"]},
|
||||
"/docs/guides/extending_langchain/": {"canonical": "/docs/how_to/#custom", "alternative": ["/v0.1/docs/guides/development/extending_langchain/"]},
|
||||
"/docs/guides/fallbacks/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/fallbacks/"]},
|
||||
"/docs/guides/model_laboratory/": {"canonical": "https://docs.smith.langchain.com/", "alternative": ["/v0.1/docs/guides/productionization/evaluation/"]},
|
||||
"/docs/guides/pydantic_compatibility/": {"canonical": "/docs/how_to/pydantic_compatibility/", "alternative": ["/v0.1/docs/guides/development/pydantic_compatibility/"]},
|
||||
"/docs/guides/local_llms/": {"canonical": "/docs/how_to/local_llms/", "alternative": ["/v0.1/docs/guides/development/local_llms/"]},
|
||||
"/docs/modules/model_io/quick_start/": {"canonical": "/docs/how_to/#chat-models", "alternative": ["/v0.1/docs/modules/model_io/"]},
|
||||
"/docs/expression_language/how_to/generators/": {"canonical": "/docs/how_to/functions/", "alternative": ["/v0.1/docs/expression_language/primitives/functions/"]},
|
||||
"/docs/expression_language/how_to/functions/": {"canonical": "/docs/how_to/functions/", "alternative": ["/v0.1/docs/expression_language/primitives/functions/"]},
|
||||
"/docs/expression_language/how_to/passthrough/": {"canonical": "/docs/how_to/passthrough/", "alternative": ["/v0.1/docs/expression_language/primitives/passthrough/"]},
|
||||
"/docs/expression_language/how_to/map/": {"canonical": "/docs/how_to/parallel/", "alternative": ["/v0.1/docs/expression_language/primitives/parallel/"]},
|
||||
"/docs/expression_language/how_to/binding/": {"canonical": "/docs/how_to/binding/", "alternative": ["/v0.1/docs/expression_language/primitives/binding/"]},
|
||||
"/docs/expression_language/how_to/configure/": {"canonical": "/docs/how_to/configure/", "alternative": ["/v0.1/docs/expression_language/primitives/configure/"]},
|
||||
"/docs/expression_language/cookbook/prompt_llm_parser/": {"canonical": "/docs/how_to/sequence/", "alternative": ["/v0.1/docs/expression_language/get_started/"]},
|
||||
"/docs/contributing/documentation/": {"canonical": "/docs/contributing/documentation/", "alternative": ["/v0.1/docs/contributing/documentation/technical_logistics/"]},
|
||||
"/docs/expression_language/cookbook/": {"canonical": "/docs/how_to/#langchain-expression-language-lcel", "alternative": ["/v0.1/docs/expression_language/"]},
|
||||
"/docs/integrations/text_embedding/solar/": {"canonical": "/docs/integrations/text_embedding/upstage/"},
|
||||
"/docs/integrations/chat/solar/": {"canonical": "/docs/integrations/chat/upstage/"},
|
||||
// custom ones
|
||||
|
||||
"/docs/modules/model_io/chat/llm_chain/": {
|
||||
"canonical": "/docs/tutorials/llm_chain/"
|
||||
},
|
||||
|
||||
"/docs/modules/agents/toolkits/": {
|
||||
"canonical": "/docs/integrations/tools/",
|
||||
"alternative": [
|
||||
"/v0.1/docs/integrations/toolkits/"
|
||||
]
|
||||
}
|
||||
}
|
||||
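Each entry above maps a retired documentation path to its closest current home: "canonical" is the preferred v0.2 page (or null when there is none) and "alternative" lists the archived v0.1 pages. A minimal sketch of how such a mapping could be consumed once loaded into a Python dict; `legacy_page_map` and `resolve_legacy_path` are hypothetical names for illustration, not part of the docs build:

```python
from typing import Optional

# Hypothetical in-memory copy of two entries from the mapping above.
legacy_page_map = {
    "/docs/use_cases/csv/": {
        "canonical": "/docs/tutorials/sql_qa/",
        "alternative": ["/v0.1/docs/use_cases/sql/csv/"],
    },
    "/en/latest/use_cases/apis.html/": {
        "canonical": None,
        "alternative": ["/v0.1/docs/use_cases/apis/"],
    },
}


def resolve_legacy_path(path: str) -> Optional[str]:
    """Return the best redirect target for a retired docs path, if any."""
    entry = legacy_page_map.get(path)
    if entry is None:
        return None
    # Prefer the canonical v0.2 page; otherwise fall back to the archived v0.1 copy.
    if entry.get("canonical"):
        return entry["canonical"]
    alternatives = entry.get("alternative") or []
    return alternatives[0] if alternatives else None


print(resolve_legacy_path("/docs/use_cases/csv/"))             # /docs/tutorials/sql_qa/
print(resolve_legacy_path("/en/latest/use_cases/apis.html/"))  # /v0.1/docs/use_cases/apis/
```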
@@ -26,22 +26,6 @@
}
],
"redirects": [
{
"source": "/docs/modules/agents/tools/custom_tools(/?)",
"destination": "/docs/how_to/custom_tools/"
},
{
"source": "/docs/expression_language(/?)",
"destination": "/docs/concepts/#langchain-expression-language-lcel"
},
{
"source": "/docs/expression_language/interface(/?)",
"destination": "/docs/concepts/#runnable-interface"
},
{
"source": "/docs/versions/overview(/?)",
"destination": "/docs/versions/v0_2/overview/"
},
{
"source": "/docs/how_to/tool_calls_multi_modal(/?)",
"destination": "/docs/how_to/multimodal_inputs/"

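The "(/?)" suffix on each source makes the trailing slash optional, so a single rule covers both spellings of a path. A quick illustration with Python's re module; the actual matching is performed by the docs hosting layer, not by this snippet:

```python
import re

# One redirect source such as "/docs/expression_language(/?)" accepts the path
# with or without a trailing slash.
source = re.compile(r"/docs/expression_language(/?)")

assert source.fullmatch("/docs/expression_language")
assert source.fullmatch("/docs/expression_language/")
```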
@@ -1,5 +1,8 @@
from langchain_cli._version import __version__
from importlib import metadata

__all__ = [
    "__version__",
]
try:
    __version__ = metadata.version(__package__)
except metadata.PackageNotFoundError:
    # Case where package metadata is not available.
    __version__ = ""
del metadata  # optional, avoids polluting the results of dir(__package__)

@@ -1,10 +0,0 @@
from importlib import metadata

try:
    __version__ = metadata.version(__package__)
except metadata.PackageNotFoundError:
    # Case where package metadata is not available.
    __version__ = ""
del metadata  # optional, avoids polluting the results of dir(__package__)

__all__ = ["__version__"]
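Both versions of the module rely on the standard importlib.metadata pattern for single-sourcing a package version. A small sketch of the same technique applied to an arbitrary distribution name (the distribution names in the comments are examples only):

```python
from importlib import metadata


def installed_version(dist_name: str) -> str:
    """Return the installed version of a distribution, or "" when unavailable."""
    try:
        return metadata.version(dist_name)
    except metadata.PackageNotFoundError:
        # Same fallback as the module above: metadata is missing for
        # uninstalled checkouts, so report an empty version string.
        return ""


print(installed_version("typer"))            # e.g. "0.12.3" if typer is installed
print(installed_version("not-a-real-dist"))  # ""
```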
@@ -1,15 +1,16 @@
import importlib
from typing import Optional

import typer
from typing_extensions import Annotated

from langchain_cli._version import __version__
from langchain_cli.namespaces import app as app_namespace
from langchain_cli.namespaces import integration as integration_namespace
from langchain_cli.namespaces import template as template_namespace
from langchain_cli.namespaces.migrate import main as migrate_namespace
from langchain_cli.utils.packages import get_langserve_export, get_package_root

__version__ = "0.0.22rc0"

app = typer.Typer(no_args_is_help=True, add_completion=False)
app.add_typer(
    template_namespace.package_cli, name="template", help=template_namespace.__doc__
@@ -21,16 +22,12 @@ app.add_typer(
    help=integration_namespace.__doc__,
)

app.command(
    name="migrate",
    context_settings={
        # Let Grit handle the arguments
        "allow_extra_args": True,
        "ignore_unknown_options": True,
    },
)(
    migrate_namespace.migrate,
)

# If libcst is installed, add the migrate namespace
if importlib.util.find_spec("libcst"):
    from langchain_cli.namespaces.migrate import main as migrate_namespace

    app.add_typer(migrate_namespace.app, name="migrate", help=migrate_namespace.__doc__)


def version_callback(show_version: bool) -> None:

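The CLI wiring above registers the migrate namespace only when libcst can be imported, so the optional dependency never breaks the base CLI. A minimal sketch of that gating pattern with typer, assuming a standalone script; the command names and bodies are illustrative only:

```python
import importlib.util

import typer

app = typer.Typer(no_args_is_help=True, add_completion=False)


@app.command()
def hello() -> None:
    """Placeholder command so the app is never empty."""
    typer.echo("hello")


# Register the heavier sub-app only when its optional dependency is present,
# mirroring the libcst guard above (names here are illustrative).
if importlib.util.find_spec("libcst"):
    extra_app = typer.Typer()

    @extra_app.command()
    def migrate_stub() -> None:
        typer.echo("libcst is available; migration commands could be registered here")

    app.add_typer(extra_app, name="migrate")


if __name__ == "__main__":
    app()
```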
@@ -13,7 +13,7 @@ license = "MIT"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.0"
langchain-core = "^0.3.0.dev"

[tool.poetry.group.test]
optional = true

@@ -1,2 +0,0 @@
.gritmodules*
*.log
@@ -1,3 +0,0 @@
version: 0.0.1
patterns:
  - name: github.com/getgrit/stdlib#*
@@ -1,56 +0,0 @@
# Testing the replace_imports migration

This runs the v0.2 migration with a desired set of rules.

```grit
language python

langchain_all_migrations()
```

## Single import

Before:

```python
from langchain.chat_models import ChatOpenAI
```

After:

```python
from langchain_community.chat_models import ChatOpenAI
```

## Community to partner

```python
from langchain_community.chat_models import ChatOpenAI
```

```python
from langchain_openai import ChatOpenAI
```

## Noop

This file should not match at all.

```python
from foo import ChatOpenAI
```

## Mixed imports

```python
from langchain_community.chat_models import ChatOpenAI, ChatAnthropic, foo
```

```python
from langchain_community.chat_models import foo

from langchain_openai import ChatOpenAI

from langchain_anthropic import ChatAnthropic

```
@@ -1,15 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_anthropic() {
    find_replace_imports(list=[
        [`langchain_community.chat_models.anthropic`, `ChatAnthropic`, `langchain_anthropic`, `ChatAnthropic`],
        [`langchain_community.llms.anthropic`, `Anthropic`, `langchain_anthropic`, `Anthropic`],
        [`langchain_community.chat_models`, `ChatAnthropic`, `langchain_anthropic`, `ChatAnthropic`],
        [`langchain_community.llms`, `Anthropic`, `langchain_anthropic`, `Anthropic`]
    ])
}

// Add this for invoking directly
langchain_migrate_anthropic()
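In user code, the mapping list above amounts to an import rewrite like the following before/after sketch (derived directly from the rule; only the import path changes):

```python
# Before: the chat model is imported from the community package.
from langchain_community.chat_models import ChatAnthropic

# After: the same class is imported from the langchain-anthropic partner package.
from langchain_anthropic import ChatAnthropic  # noqa: F811 (before/after shown together)
```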
@@ -1,67 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_astradb() {
    find_replace_imports(list=[

        [
            `langchain_community.vectorstores.astradb`,
            `AstraDB`,
            `langchain_astradb`,
            `AstraDBVectorStore`
        ]
        ,

        [
            `langchain_community.storage.astradb`,
            `AstraDBByteStore`,
            `langchain_astradb`,
            `AstraDBByteStore`
        ]
        ,

        [
            `langchain_community.storage.astradb`,
            `AstraDBStore`,
            `langchain_astradb`,
            `AstraDBStore`
        ]
        ,

        [
            `langchain_community.cache`,
            `AstraDBCache`,
            `langchain_astradb`,
            `AstraDBCache`
        ]
        ,

        [
            `langchain_community.cache`,
            `AstraDBSemanticCache`,
            `langchain_astradb`,
            `AstraDBSemanticCache`
        ]
        ,

        [
            `langchain_community.chat_message_histories.astradb`,
            `AstraDBChatMessageHistory`,
            `langchain_astradb`,
            `AstraDBChatMessageHistory`
        ]
        ,

        [
            `langchain_community.document_loaders.astradb`,
            `AstraDBLoader`,
            `langchain_astradb`,
            `AstraDBLoader`
        ]

    ])
}

// Add this for invoking directly
langchain_migrate_astradb()
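Note that the first entry also renames the symbol: the community `AstraDB` vector store becomes `AstraDBVectorStore` in the partner package, while the remaining entries keep their names. A before/after sketch derived from that entry:

```python
# Before: the community vector store class.
from langchain_community.vectorstores.astradb import AstraDB

# After: the partner package, where the class was also renamed.
from langchain_astradb import AstraDBVectorStore
```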
@@ -1,38 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_community_to_core() {
    find_replace_imports(list=[
        [`langchain_community.callbacks.tracers`, `ConsoleCallbackHandler`, `langchain_core.tracers`, `ConsoleCallbackHandler`],
        [`langchain_community.callbacks.tracers`, `FunctionCallbackHandler`, `langchain_core.tracers.stdout`, `FunctionCallbackHandler`],
        [`langchain_community.callbacks.tracers`, `LangChainTracer`, `langchain_core.tracers`, `LangChainTracer`],
        [`langchain_community.callbacks.tracers`, `LangChainTracerV1`, `langchain_core.tracers.langchain_v1`, `LangChainTracerV1`],
        [`langchain_community.docstore.document`, `Document`, `langchain_core.documents`, `Document`],
        [`langchain_community.document_loaders`, `Blob`, `langchain_core.document_loaders`, `Blob`],
        [`langchain_community.document_loaders`, `BlobLoader`, `langchain_core.document_loaders`, `BlobLoader`],
        [`langchain_community.document_loaders.base`, `BaseBlobParser`, `langchain_core.document_loaders`, `BaseBlobParser`],
        [`langchain_community.document_loaders.base`, `BaseLoader`, `langchain_core.document_loaders`, `BaseLoader`],
        [`langchain_community.document_loaders.blob_loaders`, `Blob`, `langchain_core.document_loaders`, `Blob`],
        [`langchain_community.document_loaders.blob_loaders`, `BlobLoader`, `langchain_core.document_loaders`, `BlobLoader`],
        [`langchain_community.document_loaders.blob_loaders.schema`, `Blob`, `langchain_core.document_loaders`, `Blob`],
        [`langchain_community.document_loaders.blob_loaders.schema`, `BlobLoader`, `langchain_core.document_loaders`, `BlobLoader`],
        [`langchain_community.tools`, `BaseTool`, `langchain_core.tools`, `BaseTool`],
        [`langchain_community.tools`, `StructuredTool`, `langchain_core.tools`, `StructuredTool`],
        [`langchain_community.tools`, `Tool`, `langchain_core.tools`, `Tool`],
        [`langchain_community.tools`, `format_tool_to_openai_function`, `langchain_core.utils.function_calling`, `format_tool_to_openai_function`],
        [`langchain_community.tools`, `tool`, `langchain_core.tools`, `tool`],
        [`langchain_community.tools.convert_to_openai`, `format_tool_to_openai_function`, `langchain_core.utils.function_calling`, `format_tool_to_openai_function`],
        [`langchain_community.tools.convert_to_openai`, `format_tool_to_openai_tool`, `langchain_core.utils.function_calling`, `format_tool_to_openai_tool`],
        [`langchain_community.tools.render`, `format_tool_to_openai_function`, `langchain_core.utils.function_calling`, `format_tool_to_openai_function`],
        [`langchain_community.tools.render`, `format_tool_to_openai_tool`, `langchain_core.utils.function_calling`, `format_tool_to_openai_tool`],
        [`langchain_community.utils.openai_functions`, `FunctionDescription`, `langchain_core.utils.function_calling`, `FunctionDescription`],
        [`langchain_community.utils.openai_functions`, `ToolDescription`, `langchain_core.utils.function_calling`, `ToolDescription`],
        [`langchain_community.utils.openai_functions`, `convert_pydantic_to_openai_function`, `langchain_core.utils.function_calling`, `convert_pydantic_to_openai_function`],
        [`langchain_community.utils.openai_functions`, `convert_pydantic_to_openai_tool`, `langchain_core.utils.function_calling`, `convert_pydantic_to_openai_tool`],
        [`langchain_community.vectorstores`, `VectorStore`, `langchain_core.vectorstores`, `VectorStore`]
    ])
}

// Add this for invoking directly
langchain_migrate_community_to_core()
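These entries move core abstractions from langchain_community to their langchain_core homes without renaming them; for example, derived from the `Document` entry above:

```python
# Before: a core abstraction imported via langchain_community.
from langchain_community.docstore.document import Document

# After: the same class from langchain_core, where it now lives.
from langchain_core.documents import Document  # noqa: F811 (before/after shown together)
```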
@@ -1,18 +0,0 @@
language python

pattern langchain_all_migrations() {
    any {
        langchain_migrate_community_to_core(),
        langchain_migrate_fireworks(),
        langchain_migrate_ibm(),
        langchain_migrate_langchain_to_core(),
        langchain_migrate_langchain_to_langchain_community(),
        langchain_migrate_langchain_to_textsplitters(),
        langchain_migrate_openai(),
        langchain_migrate_pinecone(),
        langchain_migrate_anthropic(),
        replace_pydantic_v1_shim()
    }
}

langchain_all_migrations()
@@ -1,15 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_fireworks() {
    find_replace_imports(list=[
        [`langchain_community.chat_models.fireworks`, `ChatFireworks`, `langchain_fireworks`, `ChatFireworks`],
        [`langchain_community.llms.fireworks`, `Fireworks`, `langchain_fireworks`, `Fireworks`],
        [`langchain_community.chat_models`, `ChatFireworks`, `langchain_fireworks`, `ChatFireworks`],
        [`langchain_community.llms`, `Fireworks`, `langchain_fireworks`, `Fireworks`]
    ])
}

// Add this for invoking directly
langchain_migrate_fireworks()
@@ -1,13 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_ibm() {
    find_replace_imports(list=[
        [`langchain_community.llms.watsonxllm`, `WatsonxLLM`, `langchain_ibm`, `WatsonxLLM`],
        [`langchain_community.llms`, `WatsonxLLM`, `langchain_ibm`, `WatsonxLLM`]
    ])
}

// Add this for invoking directly
langchain_migrate_ibm()
@@ -1,542 +0,0 @@
|
||||
|
||||
language python
|
||||
|
||||
// This migration is generated automatically - do not manually edit this file
|
||||
pattern langchain_migrate_langchain_to_core() {
|
||||
find_replace_imports(list=[
|
||||
[`langchain._api`, `deprecated`, `langchain_core._api`, `deprecated`],
|
||||
[`langchain._api`, `LangChainDeprecationWarning`, `langchain_core._api`, `LangChainDeprecationWarning`],
|
||||
[`langchain._api`, `suppress_langchain_deprecation_warning`, `langchain_core._api`, `suppress_langchain_deprecation_warning`],
|
||||
[`langchain._api`, `surface_langchain_deprecation_warnings`, `langchain_core._api`, `surface_langchain_deprecation_warnings`],
|
||||
[`langchain._api`, `warn_deprecated`, `langchain_core._api`, `warn_deprecated`],
|
||||
[`langchain._api.deprecation`, `LangChainDeprecationWarning`, `langchain_core._api`, `LangChainDeprecationWarning`],
|
||||
[`langchain._api.deprecation`, `LangChainPendingDeprecationWarning`, `langchain_core._api.deprecation`, `LangChainPendingDeprecationWarning`],
|
||||
[`langchain._api.deprecation`, `deprecated`, `langchain_core._api`, `deprecated`],
|
||||
[`langchain._api.deprecation`, `suppress_langchain_deprecation_warning`, `langchain_core._api`, `suppress_langchain_deprecation_warning`],
|
||||
[`langchain._api.deprecation`, `warn_deprecated`, `langchain_core._api`, `warn_deprecated`],
|
||||
[`langchain._api.deprecation`, `surface_langchain_deprecation_warnings`, `langchain_core._api`, `surface_langchain_deprecation_warnings`],
|
||||
[`langchain._api.path`, `get_relative_path`, `langchain_core._api`, `get_relative_path`],
|
||||
[`langchain._api.path`, `as_import_path`, `langchain_core._api`, `as_import_path`],
|
||||
[`langchain.agents`, `Tool`, `langchain_core.tools`, `Tool`],
|
||||
[`langchain.agents`, `tool`, `langchain_core.tools`, `tool`],
|
||||
[`langchain.agents.tools`, `BaseTool`, `langchain_core.tools`, `BaseTool`],
|
||||
[`langchain.agents.tools`, `tool`, `langchain_core.tools`, `tool`],
|
||||
[`langchain.agents.tools`, `Tool`, `langchain_core.tools`, `Tool`],
|
||||
[`langchain.base_language`, `BaseLanguageModel`, `langchain_core.language_models`, `BaseLanguageModel`],
|
||||
[`langchain.callbacks`, `StdOutCallbackHandler`, `langchain_core.callbacks`, `StdOutCallbackHandler`],
|
||||
[`langchain.callbacks`, `StreamingStdOutCallbackHandler`, `langchain_core.callbacks`, `StreamingStdOutCallbackHandler`],
|
||||
[`langchain.callbacks`, `LangChainTracer`, `langchain_core.tracers`, `LangChainTracer`],
|
||||
[`langchain.callbacks`, `tracing_enabled`, `langchain_core.tracers.context`, `tracing_enabled`],
|
||||
[`langchain.callbacks`, `tracing_v2_enabled`, `langchain_core.tracers.context`, `tracing_v2_enabled`],
|
||||
[`langchain.callbacks`, `collect_runs`, `langchain_core.tracers.context`, `collect_runs`],
|
||||
[`langchain.callbacks.base`, `RetrieverManagerMixin`, `langchain_core.callbacks`, `RetrieverManagerMixin`],
|
||||
[`langchain.callbacks.base`, `LLMManagerMixin`, `langchain_core.callbacks`, `LLMManagerMixin`],
|
||||
[`langchain.callbacks.base`, `ChainManagerMixin`, `langchain_core.callbacks`, `ChainManagerMixin`],
|
||||
[`langchain.callbacks.base`, `ToolManagerMixin`, `langchain_core.callbacks`, `ToolManagerMixin`],
|
||||
[`langchain.callbacks.base`, `CallbackManagerMixin`, `langchain_core.callbacks`, `CallbackManagerMixin`],
|
||||
[`langchain.callbacks.base`, `RunManagerMixin`, `langchain_core.callbacks`, `RunManagerMixin`],
|
||||
[`langchain.callbacks.base`, `BaseCallbackHandler`, `langchain_core.callbacks`, `BaseCallbackHandler`],
|
||||
[`langchain.callbacks.base`, `AsyncCallbackHandler`, `langchain_core.callbacks`, `AsyncCallbackHandler`],
|
||||
[`langchain.callbacks.base`, `BaseCallbackManager`, `langchain_core.callbacks`, `BaseCallbackManager`],
|
||||
[`langchain.callbacks.manager`, `BaseRunManager`, `langchain_core.callbacks`, `BaseRunManager`],
|
||||
[`langchain.callbacks.manager`, `RunManager`, `langchain_core.callbacks`, `RunManager`],
|
||||
[`langchain.callbacks.manager`, `ParentRunManager`, `langchain_core.callbacks`, `ParentRunManager`],
|
||||
[`langchain.callbacks.manager`, `AsyncRunManager`, `langchain_core.callbacks`, `AsyncRunManager`],
|
||||
[`langchain.callbacks.manager`, `AsyncParentRunManager`, `langchain_core.callbacks`, `AsyncParentRunManager`],
|
||||
[`langchain.callbacks.manager`, `CallbackManagerForLLMRun`, `langchain_core.callbacks`, `CallbackManagerForLLMRun`],
|
||||
[`langchain.callbacks.manager`, `AsyncCallbackManagerForLLMRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForLLMRun`],
|
||||
[`langchain.callbacks.manager`, `CallbackManagerForChainRun`, `langchain_core.callbacks`, `CallbackManagerForChainRun`],
|
||||
[`langchain.callbacks.manager`, `AsyncCallbackManagerForChainRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForChainRun`],
|
||||
[`langchain.callbacks.manager`, `CallbackManagerForToolRun`, `langchain_core.callbacks`, `CallbackManagerForToolRun`],
|
||||
[`langchain.callbacks.manager`, `AsyncCallbackManagerForToolRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForToolRun`],
|
||||
[`langchain.callbacks.manager`, `CallbackManagerForRetrieverRun`, `langchain_core.callbacks`, `CallbackManagerForRetrieverRun`],
|
||||
[`langchain.callbacks.manager`, `AsyncCallbackManagerForRetrieverRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForRetrieverRun`],
|
||||
[`langchain.callbacks.manager`, `CallbackManager`, `langchain_core.callbacks`, `CallbackManager`],
|
||||
[`langchain.callbacks.manager`, `CallbackManagerForChainGroup`, `langchain_core.callbacks`, `CallbackManagerForChainGroup`],
|
||||
[`langchain.callbacks.manager`, `AsyncCallbackManager`, `langchain_core.callbacks`, `AsyncCallbackManager`],
|
||||
[`langchain.callbacks.manager`, `AsyncCallbackManagerForChainGroup`, `langchain_core.callbacks`, `AsyncCallbackManagerForChainGroup`],
|
||||
[`langchain.callbacks.manager`, `tracing_enabled`, `langchain_core.tracers.context`, `tracing_enabled`],
|
||||
[`langchain.callbacks.manager`, `tracing_v2_enabled`, `langchain_core.tracers.context`, `tracing_v2_enabled`],
|
||||
[`langchain.callbacks.manager`, `collect_runs`, `langchain_core.tracers.context`, `collect_runs`],
|
||||
[`langchain.callbacks.manager`, `atrace_as_chain_group`, `langchain_core.callbacks.manager`, `atrace_as_chain_group`],
|
||||
[`langchain.callbacks.manager`, `trace_as_chain_group`, `langchain_core.callbacks.manager`, `trace_as_chain_group`],
|
||||
[`langchain.callbacks.manager`, `handle_event`, `langchain_core.callbacks.manager`, `handle_event`],
|
||||
[`langchain.callbacks.manager`, `ahandle_event`, `langchain_core.callbacks.manager`, `ahandle_event`],
|
||||
[`langchain.callbacks.manager`, `env_var_is_set`, `langchain_core.utils.env`, `env_var_is_set`],
|
||||
[`langchain.callbacks.stdout`, `StdOutCallbackHandler`, `langchain_core.callbacks`, `StdOutCallbackHandler`],
|
||||
[`langchain.callbacks.streaming_stdout`, `StreamingStdOutCallbackHandler`, `langchain_core.callbacks`, `StreamingStdOutCallbackHandler`],
|
||||
[`langchain.callbacks.tracers`, `ConsoleCallbackHandler`, `langchain_core.tracers`, `ConsoleCallbackHandler`],
|
||||
[`langchain.callbacks.tracers`, `FunctionCallbackHandler`, `langchain_core.tracers.stdout`, `FunctionCallbackHandler`],
|
||||
[`langchain.callbacks.tracers`, `LangChainTracer`, `langchain_core.tracers`, `LangChainTracer`],
|
||||
[`langchain.callbacks.tracers`, `LangChainTracerV1`, `langchain_core.tracers.langchain_v1`, `LangChainTracerV1`],
|
||||
[`langchain.callbacks.tracers.base`, `BaseTracer`, `langchain_core.tracers`, `BaseTracer`],
|
||||
[`langchain.callbacks.tracers.base`, `TracerException`, `langchain_core.exceptions`, `TracerException`],
|
||||
[`langchain.callbacks.tracers.evaluation`, `wait_for_all_evaluators`, `langchain_core.tracers.evaluation`, `wait_for_all_evaluators`],
|
||||
[`langchain.callbacks.tracers.evaluation`, `EvaluatorCallbackHandler`, `langchain_core.tracers`, `EvaluatorCallbackHandler`],
|
||||
[`langchain.callbacks.tracers.langchain`, `LangChainTracer`, `langchain_core.tracers`, `LangChainTracer`],
|
||||
[`langchain.callbacks.tracers.langchain`, `wait_for_all_tracers`, `langchain_core.tracers.langchain`, `wait_for_all_tracers`],
|
||||
[`langchain.callbacks.tracers.langchain_v1`, `LangChainTracerV1`, `langchain_core.tracers.langchain_v1`, `LangChainTracerV1`],
|
||||
[`langchain.callbacks.tracers.log_stream`, `LogEntry`, `langchain_core.tracers.log_stream`, `LogEntry`],
|
||||
[`langchain.callbacks.tracers.log_stream`, `RunState`, `langchain_core.tracers.log_stream`, `RunState`],
|
||||
[`langchain.callbacks.tracers.log_stream`, `RunLog`, `langchain_core.tracers`, `RunLog`],
|
||||
[`langchain.callbacks.tracers.log_stream`, `RunLogPatch`, `langchain_core.tracers`, `RunLogPatch`],
|
||||
[`langchain.callbacks.tracers.log_stream`, `LogStreamCallbackHandler`, `langchain_core.tracers`, `LogStreamCallbackHandler`],
|
||||
[`langchain.callbacks.tracers.root_listeners`, `RootListenersTracer`, `langchain_core.tracers.root_listeners`, `RootListenersTracer`],
|
||||
[`langchain.callbacks.tracers.run_collector`, `RunCollectorCallbackHandler`, `langchain_core.tracers.run_collector`, `RunCollectorCallbackHandler`],
|
||||
[`langchain.callbacks.tracers.schemas`, `BaseRun`, `langchain_core.tracers.schemas`, `BaseRun`],
|
||||
[`langchain.callbacks.tracers.schemas`, `ChainRun`, `langchain_core.tracers.schemas`, `ChainRun`],
|
||||
[`langchain.callbacks.tracers.schemas`, `LLMRun`, `langchain_core.tracers.schemas`, `LLMRun`],
|
||||
[`langchain.callbacks.tracers.schemas`, `Run`, `langchain_core.tracers`, `Run`],
|
||||
[`langchain.callbacks.tracers.schemas`, `RunTypeEnum`, `langchain_core.tracers.schemas`, `RunTypeEnum`],
|
||||
[`langchain.callbacks.tracers.schemas`, `ToolRun`, `langchain_core.tracers.schemas`, `ToolRun`],
|
||||
[`langchain.callbacks.tracers.schemas`, `TracerSession`, `langchain_core.tracers.schemas`, `TracerSession`],
|
||||
[`langchain.callbacks.tracers.schemas`, `TracerSessionBase`, `langchain_core.tracers.schemas`, `TracerSessionBase`],
|
||||
[`langchain.callbacks.tracers.schemas`, `TracerSessionV1`, `langchain_core.tracers.schemas`, `TracerSessionV1`],
|
||||
[`langchain.callbacks.tracers.schemas`, `TracerSessionV1Base`, `langchain_core.tracers.schemas`, `TracerSessionV1Base`],
|
||||
[`langchain.callbacks.tracers.schemas`, `TracerSessionV1Create`, `langchain_core.tracers.schemas`, `TracerSessionV1Create`],
|
||||
[`langchain.callbacks.tracers.stdout`, `FunctionCallbackHandler`, `langchain_core.tracers.stdout`, `FunctionCallbackHandler`],
|
||||
[`langchain.callbacks.tracers.stdout`, `ConsoleCallbackHandler`, `langchain_core.tracers`, `ConsoleCallbackHandler`],
|
||||
[`langchain.chains.openai_functions`, `convert_to_openai_function`, `langchain_core.utils.function_calling`, `convert_to_openai_function`],
|
||||
[`langchain.chains.openai_functions.base`, `convert_to_openai_function`, `langchain_core.utils.function_calling`, `convert_to_openai_function`],
|
||||
[`langchain.chat_models.base`, `BaseChatModel`, `langchain_core.language_models`, `BaseChatModel`],
|
||||
[`langchain.chat_models.base`, `SimpleChatModel`, `langchain_core.language_models`, `SimpleChatModel`],
|
||||
[`langchain.chat_models.base`, `generate_from_stream`, `langchain_core.language_models.chat_models`, `generate_from_stream`],
|
||||
[`langchain.chat_models.base`, `agenerate_from_stream`, `langchain_core.language_models.chat_models`, `agenerate_from_stream`],
|
||||
[`langchain.docstore.document`, `Document`, `langchain_core.documents`, `Document`],
|
||||
[`langchain.document_loaders`, `Blob`, `langchain_core.document_loaders`, `Blob`],
|
||||
[`langchain.document_loaders`, `BlobLoader`, `langchain_core.document_loaders`, `BlobLoader`],
|
||||
[`langchain.document_loaders.base`, `BaseLoader`, `langchain_core.document_loaders`, `BaseLoader`],
|
||||
[`langchain.document_loaders.base`, `BaseBlobParser`, `langchain_core.document_loaders`, `BaseBlobParser`],
|
||||
[`langchain.document_loaders.blob_loaders`, `BlobLoader`, `langchain_core.document_loaders`, `BlobLoader`],
|
||||
[`langchain.document_loaders.blob_loaders`, `Blob`, `langchain_core.document_loaders`, `Blob`],
|
||||
[`langchain.document_loaders.blob_loaders.schema`, `Blob`, `langchain_core.document_loaders`, `Blob`],
|
||||
[`langchain.document_loaders.blob_loaders.schema`, `BlobLoader`, `langchain_core.document_loaders`, `BlobLoader`],
|
||||
[`langchain.embeddings.base`, `Embeddings`, `langchain_core.embeddings`, `Embeddings`],
|
||||
[`langchain.formatting`, `StrictFormatter`, `langchain_core.utils`, `StrictFormatter`],
|
||||
[`langchain.input`, `get_bolded_text`, `langchain_core.utils`, `get_bolded_text`],
|
||||
[`langchain.input`, `get_color_mapping`, `langchain_core.utils`, `get_color_mapping`],
|
||||
[`langchain.input`, `get_colored_text`, `langchain_core.utils`, `get_colored_text`],
|
||||
[`langchain.input`, `print_text`, `langchain_core.utils`, `print_text`],
|
||||
[`langchain.llms.base`, `BaseLanguageModel`, `langchain_core.language_models`, `BaseLanguageModel`],
|
||||
[`langchain.llms.base`, `BaseLLM`, `langchain_core.language_models`, `BaseLLM`],
|
||||
[`langchain.llms.base`, `LLM`, `langchain_core.language_models`, `LLM`],
|
||||
[`langchain.load`, `dumpd`, `langchain_core.load`, `dumpd`],
|
||||
[`langchain.load`, `dumps`, `langchain_core.load`, `dumps`],
|
||||
[`langchain.load`, `load`, `langchain_core.load`, `load`],
|
||||
[`langchain.load`, `loads`, `langchain_core.load`, `loads`],
|
||||
[`langchain.load.dump`, `default`, `langchain_core.load.dump`, `default`],
|
||||
[`langchain.load.dump`, `dumps`, `langchain_core.load`, `dumps`],
|
||||
[`langchain.load.dump`, `dumpd`, `langchain_core.load`, `dumpd`],
|
||||
[`langchain.load.load`, `Reviver`, `langchain_core.load.load`, `Reviver`],
|
||||
[`langchain.load.load`, `loads`, `langchain_core.load`, `loads`],
|
||||
[`langchain.load.load`, `load`, `langchain_core.load`, `load`],
|
||||
[`langchain.load.serializable`, `BaseSerialized`, `langchain_core.load.serializable`, `BaseSerialized`],
|
||||
[`langchain.load.serializable`, `SerializedConstructor`, `langchain_core.load.serializable`, `SerializedConstructor`],
|
||||
[`langchain.load.serializable`, `SerializedSecret`, `langchain_core.load.serializable`, `SerializedSecret`],
|
||||
[`langchain.load.serializable`, `SerializedNotImplemented`, `langchain_core.load.serializable`, `SerializedNotImplemented`],
|
||||
[`langchain.load.serializable`, `try_neq_default`, `langchain_core.load.serializable`, `try_neq_default`],
|
||||
[`langchain.load.serializable`, `Serializable`, `langchain_core.load`, `Serializable`],
|
||||
[`langchain.load.serializable`, `to_json_not_implemented`, `langchain_core.load.serializable`, `to_json_not_implemented`],
|
||||
[`langchain.output_parsers`, `CommaSeparatedListOutputParser`, `langchain_core.output_parsers`, `CommaSeparatedListOutputParser`],
|
||||
[`langchain.output_parsers`, `ListOutputParser`, `langchain_core.output_parsers`, `ListOutputParser`],
|
||||
[`langchain.output_parsers`, `MarkdownListOutputParser`, `langchain_core.output_parsers`, `MarkdownListOutputParser`],
|
||||
[`langchain.output_parsers`, `NumberedListOutputParser`, `langchain_core.output_parsers`, `NumberedListOutputParser`],
|
||||
[`langchain.output_parsers`, `PydanticOutputParser`, `langchain_core.output_parsers`, `PydanticOutputParser`],
|
||||
[`langchain.output_parsers`, `XMLOutputParser`, `langchain_core.output_parsers`, `XMLOutputParser`],
|
||||
[`langchain.output_parsers`, `JsonOutputToolsParser`, `langchain_core.output_parsers.openai_tools`, `JsonOutputToolsParser`],
|
||||
[`langchain.output_parsers`, `PydanticToolsParser`, `langchain_core.output_parsers.openai_tools`, `PydanticToolsParser`],
|
||||
[`langchain.output_parsers`, `JsonOutputKeyToolsParser`, `langchain_core.output_parsers.openai_tools`, `JsonOutputKeyToolsParser`],
|
||||
[`langchain.output_parsers.json`, `SimpleJsonOutputParser`, `langchain_core.output_parsers`, `JsonOutputParser`],
|
||||
[`langchain.output_parsers.json`, `parse_partial_json`, `langchain_core.utils.json`, `parse_partial_json`],
|
||||
[`langchain.output_parsers.json`, `parse_json_markdown`, `langchain_core.utils.json`, `parse_json_markdown`],
|
||||
[`langchain.output_parsers.json`, `parse_and_check_json_markdown`, `langchain_core.utils.json`, `parse_and_check_json_markdown`],
|
||||
[`langchain.output_parsers.list`, `ListOutputParser`, `langchain_core.output_parsers`, `ListOutputParser`],
|
||||
[`langchain.output_parsers.list`, `CommaSeparatedListOutputParser`, `langchain_core.output_parsers`, `CommaSeparatedListOutputParser`],
|
||||
[`langchain.output_parsers.list`, `NumberedListOutputParser`, `langchain_core.output_parsers`, `NumberedListOutputParser`],
|
||||
[`langchain.output_parsers.list`, `MarkdownListOutputParser`, `langchain_core.output_parsers`, `MarkdownListOutputParser`],
|
||||
[`langchain.output_parsers.openai_functions`, `PydanticOutputFunctionsParser`, `langchain_core.output_parsers.openai_functions`, `PydanticOutputFunctionsParser`],
|
||||
[`langchain.output_parsers.openai_functions`, `PydanticAttrOutputFunctionsParser`, `langchain_core.output_parsers.openai_functions`, `PydanticAttrOutputFunctionsParser`],
|
||||
[`langchain.output_parsers.openai_functions`, `JsonOutputFunctionsParser`, `langchain_core.output_parsers.openai_functions`, `JsonOutputFunctionsParser`],
|
||||
[`langchain.output_parsers.openai_functions`, `JsonKeyOutputFunctionsParser`, `langchain_core.output_parsers.openai_functions`, `JsonKeyOutputFunctionsParser`],
|
||||
[`langchain.output_parsers.openai_tools`, `PydanticToolsParser`, `langchain_core.output_parsers.openai_tools`, `PydanticToolsParser`],
|
||||
[`langchain.output_parsers.openai_tools`, `JsonOutputToolsParser`, `langchain_core.output_parsers.openai_tools`, `JsonOutputToolsParser`],
|
||||
[`langchain.output_parsers.openai_tools`, `JsonOutputKeyToolsParser`, `langchain_core.output_parsers.openai_tools`, `JsonOutputKeyToolsParser`],
|
||||
[`langchain.output_parsers.pydantic`, `PydanticOutputParser`, `langchain_core.output_parsers`, `PydanticOutputParser`],
|
||||
[`langchain.output_parsers.xml`, `XMLOutputParser`, `langchain_core.output_parsers`, `XMLOutputParser`],
|
||||
[`langchain.prompts`, `AIMessagePromptTemplate`, `langchain_core.prompts`, `AIMessagePromptTemplate`],
|
||||
[`langchain.prompts`, `BaseChatPromptTemplate`, `langchain_core.prompts`, `BaseChatPromptTemplate`],
|
||||
[`langchain.prompts`, `BasePromptTemplate`, `langchain_core.prompts`, `BasePromptTemplate`],
|
||||
[`langchain.prompts`, `ChatMessagePromptTemplate`, `langchain_core.prompts`, `ChatMessagePromptTemplate`],
|
||||
[`langchain.prompts`, `ChatPromptTemplate`, `langchain_core.prompts`, `ChatPromptTemplate`],
|
||||
[`langchain.prompts`, `FewShotPromptTemplate`, `langchain_core.prompts`, `FewShotPromptTemplate`],
|
||||
[`langchain.prompts`, `FewShotPromptWithTemplates`, `langchain_core.prompts`, `FewShotPromptWithTemplates`],
|
||||
[`langchain.prompts`, `HumanMessagePromptTemplate`, `langchain_core.prompts`, `HumanMessagePromptTemplate`],
|
||||
[`langchain.prompts`, `LengthBasedExampleSelector`, `langchain_core.example_selectors`, `LengthBasedExampleSelector`],
|
||||
[`langchain.prompts`, `MaxMarginalRelevanceExampleSelector`, `langchain_core.example_selectors`, `MaxMarginalRelevanceExampleSelector`],
|
||||
[`langchain.prompts`, `MessagesPlaceholder`, `langchain_core.prompts`, `MessagesPlaceholder`],
|
||||
[`langchain.prompts`, `PipelinePromptTemplate`, `langchain_core.prompts`, `PipelinePromptTemplate`],
|
||||
[`langchain.prompts`, `PromptTemplate`, `langchain_core.prompts`, `PromptTemplate`],
|
||||
[`langchain.prompts`, `SemanticSimilarityExampleSelector`, `langchain_core.example_selectors`, `SemanticSimilarityExampleSelector`],
|
||||
[`langchain.prompts`, `StringPromptTemplate`, `langchain_core.prompts`, `StringPromptTemplate`],
|
||||
[`langchain.prompts`, `SystemMessagePromptTemplate`, `langchain_core.prompts`, `SystemMessagePromptTemplate`],
|
||||
[`langchain.prompts`, `load_prompt`, `langchain_core.prompts`, `load_prompt`],
|
||||
[`langchain.prompts`, `FewShotChatMessagePromptTemplate`, `langchain_core.prompts`, `FewShotChatMessagePromptTemplate`],
|
||||
[`langchain.prompts`, `Prompt`, `langchain_core.prompts`, `PromptTemplate`],
|
||||
[`langchain.prompts.base`, `jinja2_formatter`, `langchain_core.prompts`, `jinja2_formatter`],
|
||||
[`langchain.prompts.base`, `validate_jinja2`, `langchain_core.prompts`, `validate_jinja2`],
|
||||
[`langchain.prompts.base`, `check_valid_template`, `langchain_core.prompts`, `check_valid_template`],
|
||||
[`langchain.prompts.base`, `get_template_variables`, `langchain_core.prompts`, `get_template_variables`],
|
||||
[`langchain.prompts.base`, `StringPromptTemplate`, `langchain_core.prompts`, `StringPromptTemplate`],
|
||||
[`langchain.prompts.base`, `BasePromptTemplate`, `langchain_core.prompts`, `BasePromptTemplate`],
|
||||
[`langchain.prompts.base`, `StringPromptValue`, `langchain_core.prompt_values`, `StringPromptValue`],
|
||||
[`langchain.prompts.base`, `_get_jinja2_variables_from_template`, `langchain_core.prompts.string`, `_get_jinja2_variables_from_template`],
|
||||
[`langchain.prompts.chat`, `BaseMessagePromptTemplate`, `langchain_core.prompts.chat`, `BaseMessagePromptTemplate`],
|
||||
[`langchain.prompts.chat`, `MessagesPlaceholder`, `langchain_core.prompts`, `MessagesPlaceholder`],
|
||||
[`langchain.prompts.chat`, `BaseStringMessagePromptTemplate`, `langchain_core.prompts.chat`, `BaseStringMessagePromptTemplate`],
|
||||
[`langchain.prompts.chat`, `ChatMessagePromptTemplate`, `langchain_core.prompts`, `ChatMessagePromptTemplate`],
|
||||
[`langchain.prompts.chat`, `HumanMessagePromptTemplate`, `langchain_core.prompts`, `HumanMessagePromptTemplate`],
|
||||
[`langchain.prompts.chat`, `AIMessagePromptTemplate`, `langchain_core.prompts`, `AIMessagePromptTemplate`],
|
||||
[`langchain.prompts.chat`, `SystemMessagePromptTemplate`, `langchain_core.prompts`, `SystemMessagePromptTemplate`],
|
||||
[`langchain.prompts.chat`, `BaseChatPromptTemplate`, `langchain_core.prompts`, `BaseChatPromptTemplate`],
|
||||
[`langchain.prompts.chat`, `ChatPromptTemplate`, `langchain_core.prompts`, `ChatPromptTemplate`],
|
||||
[`langchain.prompts.chat`, `ChatPromptValue`, `langchain_core.prompt_values`, `ChatPromptValue`],
|
||||
[`langchain.prompts.chat`, `ChatPromptValueConcrete`, `langchain_core.prompt_values`, `ChatPromptValueConcrete`],
|
||||
[`langchain.prompts.chat`, `_convert_to_message`, `langchain_core.prompts.chat`, `_convert_to_message`],
|
||||
[`langchain.prompts.chat`, `_create_template_from_message_type`, `langchain_core.prompts.chat`, `_create_template_from_message_type`],
|
||||
[`langchain.prompts.example_selector`, `LengthBasedExampleSelector`, `langchain_core.example_selectors`, `LengthBasedExampleSelector`],
|
||||
[`langchain.prompts.example_selector`, `MaxMarginalRelevanceExampleSelector`, `langchain_core.example_selectors`, `MaxMarginalRelevanceExampleSelector`],
|
||||
[`langchain.prompts.example_selector`, `SemanticSimilarityExampleSelector`, `langchain_core.example_selectors`, `SemanticSimilarityExampleSelector`],
|
||||
[`langchain.prompts.example_selector.base`, `BaseExampleSelector`, `langchain_core.example_selectors`, `BaseExampleSelector`],
|
||||
[`langchain.prompts.example_selector.length_based`, `LengthBasedExampleSelector`, `langchain_core.example_selectors`, `LengthBasedExampleSelector`],
|
||||
[`langchain.prompts.example_selector.semantic_similarity`, `sorted_values`, `langchain_core.example_selectors`, `sorted_values`],
|
||||
[`langchain.prompts.example_selector.semantic_similarity`, `SemanticSimilarityExampleSelector`, `langchain_core.example_selectors`, `SemanticSimilarityExampleSelector`],
|
||||
[`langchain.prompts.example_selector.semantic_similarity`, `MaxMarginalRelevanceExampleSelector`, `langchain_core.example_selectors`, `MaxMarginalRelevanceExampleSelector`],
|
||||
[`langchain.prompts.few_shot`, `FewShotPromptTemplate`, `langchain_core.prompts`, `FewShotPromptTemplate`],
|
||||
[`langchain.prompts.few_shot`, `FewShotChatMessagePromptTemplate`, `langchain_core.prompts`, `FewShotChatMessagePromptTemplate`],
|
||||
[`langchain.prompts.few_shot`, `_FewShotPromptTemplateMixin`, `langchain_core.prompts.few_shot`, `_FewShotPromptTemplateMixin`],
|
||||
[`langchain.prompts.few_shot_with_templates`, `FewShotPromptWithTemplates`, `langchain_core.prompts`, `FewShotPromptWithTemplates`],
|
||||
[`langchain.prompts.loading`, `load_prompt_from_config`, `langchain_core.prompts.loading`, `load_prompt_from_config`],
|
||||
[`langchain.prompts.loading`, `load_prompt`, `langchain_core.prompts`, `load_prompt`],
|
||||
[`langchain.prompts.loading`, `try_load_from_hub`, `langchain_core.utils`, `try_load_from_hub`],
|
||||
[`langchain.prompts.loading`, `_load_examples`, `langchain_core.prompts.loading`, `_load_examples`],
|
||||
[`langchain.prompts.loading`, `_load_few_shot_prompt`, `langchain_core.prompts.loading`, `_load_few_shot_prompt`],
|
||||
[`langchain.prompts.loading`, `_load_output_parser`, `langchain_core.prompts.loading`, `_load_output_parser`],
|
||||
[`langchain.prompts.loading`, `_load_prompt`, `langchain_core.prompts.loading`, `_load_prompt`],
|
||||
[`langchain.prompts.loading`, `_load_prompt_from_file`, `langchain_core.prompts.loading`, `_load_prompt_from_file`],
|
||||
[`langchain.prompts.loading`, `_load_template`, `langchain_core.prompts.loading`, `_load_template`],
|
||||
[`langchain.prompts.pipeline`, `PipelinePromptTemplate`, `langchain_core.prompts`, `PipelinePromptTemplate`],
|
||||
[`langchain.prompts.pipeline`, `_get_inputs`, `langchain_core.prompts.pipeline`, `_get_inputs`],
|
||||
[`langchain.prompts.prompt`, `PromptTemplate`, `langchain_core.prompts`, `PromptTemplate`],
|
||||
[`langchain.prompts.prompt`, `Prompt`, `langchain_core.prompts`, `PromptTemplate`],
|
||||
[`langchain.schema`, `BaseCache`, `langchain_core.caches`, `BaseCache`],
|
||||
[`langchain.schema`, `BaseMemory`, `langchain_core.memory`, `BaseMemory`],
|
||||
[`langchain.schema`, `BaseStore`, `langchain_core.stores`, `BaseStore`],
|
||||
[`langchain.schema`, `AgentFinish`, `langchain_core.agents`, `AgentFinish`],
|
||||
[`langchain.schema`, `AgentAction`, `langchain_core.agents`, `AgentAction`],
|
||||
[`langchain.schema`, `Document`, `langchain_core.documents`, `Document`],
|
||||
[`langchain.schema`, `BaseChatMessageHistory`, `langchain_core.chat_history`, `BaseChatMessageHistory`],
|
||||
[`langchain.schema`, `BaseDocumentTransformer`, `langchain_core.documents`, `BaseDocumentTransformer`],
|
||||
[`langchain.schema`, `BaseMessage`, `langchain_core.messages`, `BaseMessage`],
|
||||
[`langchain.schema`, `ChatMessage`, `langchain_core.messages`, `ChatMessage`],
|
||||
[`langchain.schema`, `FunctionMessage`, `langchain_core.messages`, `FunctionMessage`],
|
||||
[`langchain.schema`, `HumanMessage`, `langchain_core.messages`, `HumanMessage`],
|
||||
[`langchain.schema`, `AIMessage`, `langchain_core.messages`, `AIMessage`],
|
||||
[`langchain.schema`, `SystemMessage`, `langchain_core.messages`, `SystemMessage`],
|
||||
[`langchain.schema`, `messages_from_dict`, `langchain_core.messages`, `messages_from_dict`],
|
||||
[`langchain.schema`, `messages_to_dict`, `langchain_core.messages`, `messages_to_dict`],
|
||||
[`langchain.schema`, `message_to_dict`, `langchain_core.messages`, `message_to_dict`],
|
||||
[`langchain.schema`, `_message_to_dict`, `langchain_core.messages`, `message_to_dict`],
|
||||
[`langchain.schema`, `_message_from_dict`, `langchain_core.messages`, `_message_from_dict`],
|
||||
[`langchain.schema`, `get_buffer_string`, `langchain_core.messages`, `get_buffer_string`],
|
||||
[`langchain.schema`, `RunInfo`, `langchain_core.outputs`, `RunInfo`],
|
||||
[`langchain.schema`, `LLMResult`, `langchain_core.outputs`, `LLMResult`],
|
||||
[`langchain.schema`, `ChatResult`, `langchain_core.outputs`, `ChatResult`],
|
||||
[`langchain.schema`, `ChatGeneration`, `langchain_core.outputs`, `ChatGeneration`],
|
||||
[`langchain.schema`, `Generation`, `langchain_core.outputs`, `Generation`],
|
||||
[`langchain.schema`, `PromptValue`, `langchain_core.prompt_values`, `PromptValue`],
|
||||
[`langchain.schema`, `LangChainException`, `langchain_core.exceptions`, `LangChainException`],
|
||||
[`langchain.schema`, `BaseRetriever`, `langchain_core.retrievers`, `BaseRetriever`],
|
||||
[`langchain.schema`, `Memory`, `langchain_core.memory`, `BaseMemory`],
|
||||
[`langchain.schema`, `OutputParserException`, `langchain_core.exceptions`, `OutputParserException`],
|
||||
[`langchain.schema`, `StrOutputParser`, `langchain_core.output_parsers`, `StrOutputParser`],
|
||||
[`langchain.schema`, `BaseOutputParser`, `langchain_core.output_parsers`, `BaseOutputParser`],
|
||||
[`langchain.schema`, `BaseLLMOutputParser`, `langchain_core.output_parsers`, `BaseLLMOutputParser`],
|
||||
[`langchain.schema`, `BasePromptTemplate`, `langchain_core.prompts`, `BasePromptTemplate`],
|
||||
[`langchain.schema`, `format_document`, `langchain_core.prompts`, `format_document`],
|
||||
[`langchain.schema.agent`, `AgentAction`, `langchain_core.agents`, `AgentAction`],
|
||||
[`langchain.schema.agent`, `AgentActionMessageLog`, `langchain_core.agents`, `AgentActionMessageLog`],
|
||||
[`langchain.schema.agent`, `AgentFinish`, `langchain_core.agents`, `AgentFinish`],
|
||||
[`langchain.schema.cache`, `BaseCache`, `langchain_core.caches`, `BaseCache`],
|
||||
[`langchain.schema.callbacks.base`, `RetrieverManagerMixin`, `langchain_core.callbacks`, `RetrieverManagerMixin`],
|
||||
[`langchain.schema.callbacks.base`, `LLMManagerMixin`, `langchain_core.callbacks`, `LLMManagerMixin`],
|
||||
[`langchain.schema.callbacks.base`, `ChainManagerMixin`, `langchain_core.callbacks`, `ChainManagerMixin`],
|
||||
[`langchain.schema.callbacks.base`, `ToolManagerMixin`, `langchain_core.callbacks`, `ToolManagerMixin`],
|
||||
[`langchain.schema.callbacks.base`, `CallbackManagerMixin`, `langchain_core.callbacks`, `CallbackManagerMixin`],
|
||||
[`langchain.schema.callbacks.base`, `RunManagerMixin`, `langchain_core.callbacks`, `RunManagerMixin`],
|
||||
[`langchain.schema.callbacks.base`, `BaseCallbackHandler`, `langchain_core.callbacks`, `BaseCallbackHandler`],
|
||||
[`langchain.schema.callbacks.base`, `AsyncCallbackHandler`, `langchain_core.callbacks`, `AsyncCallbackHandler`],
|
||||
[`langchain.schema.callbacks.base`, `BaseCallbackManager`, `langchain_core.callbacks`, `BaseCallbackManager`],
|
||||
[`langchain.schema.callbacks.manager`, `tracing_enabled`, `langchain_core.tracers.context`, `tracing_enabled`],
|
||||
[`langchain.schema.callbacks.manager`, `tracing_v2_enabled`, `langchain_core.tracers.context`, `tracing_v2_enabled`],
|
||||
[`langchain.schema.callbacks.manager`, `collect_runs`, `langchain_core.tracers.context`, `collect_runs`],
|
||||
[`langchain.schema.callbacks.manager`, `trace_as_chain_group`, `langchain_core.callbacks.manager`, `trace_as_chain_group`],
|
||||
[`langchain.schema.callbacks.manager`, `handle_event`, `langchain_core.callbacks.manager`, `handle_event`],
|
||||
[`langchain.schema.callbacks.manager`, `BaseRunManager`, `langchain_core.callbacks`, `BaseRunManager`],
|
||||
[`langchain.schema.callbacks.manager`, `RunManager`, `langchain_core.callbacks`, `RunManager`],
|
||||
[`langchain.schema.callbacks.manager`, `ParentRunManager`, `langchain_core.callbacks`, `ParentRunManager`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncRunManager`, `langchain_core.callbacks`, `AsyncRunManager`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncParentRunManager`, `langchain_core.callbacks`, `AsyncParentRunManager`],
|
||||
[`langchain.schema.callbacks.manager`, `CallbackManagerForLLMRun`, `langchain_core.callbacks`, `CallbackManagerForLLMRun`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncCallbackManagerForLLMRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForLLMRun`],
|
||||
[`langchain.schema.callbacks.manager`, `CallbackManagerForChainRun`, `langchain_core.callbacks`, `CallbackManagerForChainRun`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncCallbackManagerForChainRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForChainRun`],
|
||||
[`langchain.schema.callbacks.manager`, `CallbackManagerForToolRun`, `langchain_core.callbacks`, `CallbackManagerForToolRun`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncCallbackManagerForToolRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForToolRun`],
|
||||
[`langchain.schema.callbacks.manager`, `CallbackManagerForRetrieverRun`, `langchain_core.callbacks`, `CallbackManagerForRetrieverRun`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncCallbackManagerForRetrieverRun`, `langchain_core.callbacks`, `AsyncCallbackManagerForRetrieverRun`],
|
||||
[`langchain.schema.callbacks.manager`, `CallbackManager`, `langchain_core.callbacks`, `CallbackManager`],
|
||||
[`langchain.schema.callbacks.manager`, `CallbackManagerForChainGroup`, `langchain_core.callbacks`, `CallbackManagerForChainGroup`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncCallbackManager`, `langchain_core.callbacks`, `AsyncCallbackManager`],
|
||||
[`langchain.schema.callbacks.manager`, `AsyncCallbackManagerForChainGroup`, `langchain_core.callbacks`, `AsyncCallbackManagerForChainGroup`],
|
||||
[`langchain.schema.callbacks.manager`, `register_configure_hook`, `langchain_core.tracers.context`, `register_configure_hook`],
|
||||
[`langchain.schema.callbacks.manager`, `env_var_is_set`, `langchain_core.utils.env`, `env_var_is_set`],
|
||||
[`langchain.schema.callbacks.stdout`, `StdOutCallbackHandler`, `langchain_core.callbacks`, `StdOutCallbackHandler`],
|
||||
[`langchain.schema.callbacks.streaming_stdout`, `StreamingStdOutCallbackHandler`, `langchain_core.callbacks`, `StreamingStdOutCallbackHandler`],
|
||||
[`langchain.schema.callbacks.tracers.base`, `TracerException`, `langchain_core.exceptions`, `TracerException`],
|
||||
[`langchain.schema.callbacks.tracers.base`, `BaseTracer`, `langchain_core.tracers`, `BaseTracer`],
|
||||
[`langchain.schema.callbacks.tracers.evaluation`, `wait_for_all_evaluators`, `langchain_core.tracers.evaluation`, `wait_for_all_evaluators`],
|
||||
[`langchain.schema.callbacks.tracers.evaluation`, `EvaluatorCallbackHandler`, `langchain_core.tracers`, `EvaluatorCallbackHandler`],
|
||||
[`langchain.schema.callbacks.tracers.langchain`, `log_error_once`, `langchain_core.tracers.langchain`, `log_error_once`],
|
||||
[`langchain.schema.callbacks.tracers.langchain`, `wait_for_all_tracers`, `langchain_core.tracers.langchain`, `wait_for_all_tracers`],
|
||||
[`langchain.schema.callbacks.tracers.langchain`, `get_client`, `langchain_core.tracers.langchain`, `get_client`],
|
||||
[`langchain.schema.callbacks.tracers.langchain`, `LangChainTracer`, `langchain_core.tracers`, `LangChainTracer`],
|
||||
[`langchain.schema.callbacks.tracers.langchain_v1`, `get_headers`, `langchain_core.tracers.langchain_v1`, `get_headers`],
|
||||
[`langchain.schema.callbacks.tracers.langchain_v1`, `LangChainTracerV1`, `langchain_core.tracers.langchain_v1`, `LangChainTracerV1`],
|
||||
[`langchain.schema.callbacks.tracers.log_stream`, `LogEntry`, `langchain_core.tracers.log_stream`, `LogEntry`],
|
||||
[`langchain.schema.callbacks.tracers.log_stream`, `RunState`, `langchain_core.tracers.log_stream`, `RunState`],
|
||||
[`langchain.schema.callbacks.tracers.log_stream`, `RunLogPatch`, `langchain_core.tracers`, `RunLogPatch`],
|
||||
[`langchain.schema.callbacks.tracers.log_stream`, `RunLog`, `langchain_core.tracers`, `RunLog`],
|
||||
[`langchain.schema.callbacks.tracers.log_stream`, `LogStreamCallbackHandler`, `langchain_core.tracers`, `LogStreamCallbackHandler`],
|
||||
[`langchain.schema.callbacks.tracers.root_listeners`, `RootListenersTracer`, `langchain_core.tracers.root_listeners`, `RootListenersTracer`],
|
||||
[`langchain.schema.callbacks.tracers.run_collector`, `RunCollectorCallbackHandler`, `langchain_core.tracers.run_collector`, `RunCollectorCallbackHandler`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `RunTypeEnum`, `langchain_core.tracers.schemas`, `RunTypeEnum`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `TracerSessionV1Base`, `langchain_core.tracers.schemas`, `TracerSessionV1Base`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `TracerSessionV1Create`, `langchain_core.tracers.schemas`, `TracerSessionV1Create`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `TracerSessionV1`, `langchain_core.tracers.schemas`, `TracerSessionV1`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `TracerSessionBase`, `langchain_core.tracers.schemas`, `TracerSessionBase`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `TracerSession`, `langchain_core.tracers.schemas`, `TracerSession`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `BaseRun`, `langchain_core.tracers.schemas`, `BaseRun`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `LLMRun`, `langchain_core.tracers.schemas`, `LLMRun`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `ChainRun`, `langchain_core.tracers.schemas`, `ChainRun`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `ToolRun`, `langchain_core.tracers.schemas`, `ToolRun`],
|
||||
[`langchain.schema.callbacks.tracers.schemas`, `Run`, `langchain_core.tracers`, `Run`],
|
||||
[`langchain.schema.callbacks.tracers.stdout`, `try_json_stringify`, `langchain_core.tracers.stdout`, `try_json_stringify`],
|
||||
[`langchain.schema.callbacks.tracers.stdout`, `elapsed`, `langchain_core.tracers.stdout`, `elapsed`],
|
||||
[`langchain.schema.callbacks.tracers.stdout`, `FunctionCallbackHandler`, `langchain_core.tracers.stdout`, `FunctionCallbackHandler`],
|
||||
[`langchain.schema.callbacks.tracers.stdout`, `ConsoleCallbackHandler`, `langchain_core.tracers`, `ConsoleCallbackHandler`],
|
||||
[`langchain.schema.chat`, `ChatSession`, `langchain_core.chat_sessions`, `ChatSession`],
|
||||
[`langchain.schema.chat_history`, `BaseChatMessageHistory`, `langchain_core.chat_history`, `BaseChatMessageHistory`],
|
||||
[`langchain.schema.document`, `Document`, `langchain_core.documents`, `Document`],
|
||||
[`langchain.schema.document`, `BaseDocumentTransformer`, `langchain_core.documents`, `BaseDocumentTransformer`],
|
||||
[`langchain.schema.embeddings`, `Embeddings`, `langchain_core.embeddings`, `Embeddings`],
|
||||
[`langchain.schema.exceptions`, `LangChainException`, `langchain_core.exceptions`, `LangChainException`],
|
||||
[`langchain.schema.language_model`, `BaseLanguageModel`, `langchain_core.language_models`, `BaseLanguageModel`],
|
||||
[`langchain.schema.language_model`, `_get_token_ids_default_method`, `langchain_core.language_models.base`, `_get_token_ids_default_method`],
|
||||
[`langchain.schema.memory`, `BaseMemory`, `langchain_core.memory`, `BaseMemory`],
|
||||
[`langchain.schema.messages`, `get_buffer_string`, `langchain_core.messages`, `get_buffer_string`],
|
||||
[`langchain.schema.messages`, `BaseMessage`, `langchain_core.messages`, `BaseMessage`],
|
||||
[`langchain.schema.messages`, `merge_content`, `langchain_core.messages`, `merge_content`],
|
||||
[`langchain.schema.messages`, `BaseMessageChunk`, `langchain_core.messages`, `BaseMessageChunk`],
|
||||
[`langchain.schema.messages`, `HumanMessage`, `langchain_core.messages`, `HumanMessage`],
|
||||
[`langchain.schema.messages`, `HumanMessageChunk`, `langchain_core.messages`, `HumanMessageChunk`],
|
||||
[`langchain.schema.messages`, `AIMessage`, `langchain_core.messages`, `AIMessage`],
|
||||
[`langchain.schema.messages`, `AIMessageChunk`, `langchain_core.messages`, `AIMessageChunk`],
|
||||
[`langchain.schema.messages`, `SystemMessage`, `langchain_core.messages`, `SystemMessage`],
|
||||
[`langchain.schema.messages`, `SystemMessageChunk`, `langchain_core.messages`, `SystemMessageChunk`],
|
||||
[`langchain.schema.messages`, `FunctionMessage`, `langchain_core.messages`, `FunctionMessage`],
|
||||
[`langchain.schema.messages`, `FunctionMessageChunk`, `langchain_core.messages`, `FunctionMessageChunk`],
|
||||
[`langchain.schema.messages`, `ToolMessage`, `langchain_core.messages`, `ToolMessage`],
|
||||
[`langchain.schema.messages`, `ToolMessageChunk`, `langchain_core.messages`, `ToolMessageChunk`],
|
||||
[`langchain.schema.messages`, `ChatMessage`, `langchain_core.messages`, `ChatMessage`],
|
||||
[`langchain.schema.messages`, `ChatMessageChunk`, `langchain_core.messages`, `ChatMessageChunk`],
|
||||
[`langchain.schema.messages`, `messages_to_dict`, `langchain_core.messages`, `messages_to_dict`],
|
||||
[`langchain.schema.messages`, `messages_from_dict`, `langchain_core.messages`, `messages_from_dict`],
|
||||
[`langchain.schema.messages`, `_message_to_dict`, `langchain_core.messages`, `message_to_dict`],
|
||||
[`langchain.schema.messages`, `_message_from_dict`, `langchain_core.messages`, `_message_from_dict`],
|
||||
[`langchain.schema.messages`, `message_to_dict`, `langchain_core.messages`, `message_to_dict`],
|
||||
[`langchain.schema.output`, `Generation`, `langchain_core.outputs`, `Generation`],
|
||||
[`langchain.schema.output`, `GenerationChunk`, `langchain_core.outputs`, `GenerationChunk`],
|
||||
[`langchain.schema.output`, `ChatGeneration`, `langchain_core.outputs`, `ChatGeneration`],
|
||||
[`langchain.schema.output`, `ChatGenerationChunk`, `langchain_core.outputs`, `ChatGenerationChunk`],
|
||||
[`langchain.schema.output`, `RunInfo`, `langchain_core.outputs`, `RunInfo`],
|
||||
[`langchain.schema.output`, `ChatResult`, `langchain_core.outputs`, `ChatResult`],
|
||||
[`langchain.schema.output`, `LLMResult`, `langchain_core.outputs`, `LLMResult`],
|
||||
[`langchain.schema.output_parser`, `BaseLLMOutputParser`, `langchain_core.output_parsers`, `BaseLLMOutputParser`],
|
||||
[`langchain.schema.output_parser`, `BaseGenerationOutputParser`, `langchain_core.output_parsers`, `BaseGenerationOutputParser`],
|
||||
[`langchain.schema.output_parser`, `BaseOutputParser`, `langchain_core.output_parsers`, `BaseOutputParser`],
|
||||
[`langchain.schema.output_parser`, `BaseTransformOutputParser`, `langchain_core.output_parsers`, `BaseTransformOutputParser`],
|
||||
[`langchain.schema.output_parser`, `BaseCumulativeTransformOutputParser`, `langchain_core.output_parsers`, `BaseCumulativeTransformOutputParser`],
|
||||
[`langchain.schema.output_parser`, `NoOpOutputParser`, `langchain_core.output_parsers`, `StrOutputParser`],
|
||||
[`langchain.schema.output_parser`, `StrOutputParser`, `langchain_core.output_parsers`, `StrOutputParser`],
|
||||
[`langchain.schema.output_parser`, `OutputParserException`, `langchain_core.exceptions`, `OutputParserException`],
|
||||
[`langchain.schema.prompt`, `PromptValue`, `langchain_core.prompt_values`, `PromptValue`],
|
||||
[`langchain.schema.prompt_template`, `BasePromptTemplate`, `langchain_core.prompts`, `BasePromptTemplate`],
|
||||
[`langchain.schema.prompt_template`, `format_document`, `langchain_core.prompts`, `format_document`],
|
||||
[`langchain.schema.retriever`, `BaseRetriever`, `langchain_core.retrievers`, `BaseRetriever`],
|
||||
[`langchain.schema.runnable`, `ConfigurableField`, `langchain_core.runnables`, `ConfigurableField`],
|
||||
[`langchain.schema.runnable`, `ConfigurableFieldSingleOption`, `langchain_core.runnables`, `ConfigurableFieldSingleOption`],
|
||||
[`langchain.schema.runnable`, `ConfigurableFieldMultiOption`, `langchain_core.runnables`, `ConfigurableFieldMultiOption`],
|
||||
[`langchain.schema.runnable`, `patch_config`, `langchain_core.runnables`, `patch_config`],
|
||||
[`langchain.schema.runnable`, `RouterInput`, `langchain_core.runnables`, `RouterInput`],
|
||||
[`langchain.schema.runnable`, `RouterRunnable`, `langchain_core.runnables`, `RouterRunnable`],
|
||||
[`langchain.schema.runnable`, `Runnable`, `langchain_core.runnables`, `Runnable`],
|
||||
[`langchain.schema.runnable`, `RunnableSerializable`, `langchain_core.runnables`, `RunnableSerializable`],
|
||||
[`langchain.schema.runnable`, `RunnableBinding`, `langchain_core.runnables`, `RunnableBinding`],
|
||||
[`langchain.schema.runnable`, `RunnableBranch`, `langchain_core.runnables`, `RunnableBranch`],
|
||||
[`langchain.schema.runnable`, `RunnableConfig`, `langchain_core.runnables`, `RunnableConfig`],
|
||||
[`langchain.schema.runnable`, `RunnableGenerator`, `langchain_core.runnables`, `RunnableGenerator`],
|
||||
[`langchain.schema.runnable`, `RunnableLambda`, `langchain_core.runnables`, `RunnableLambda`],
|
||||
[`langchain.schema.runnable`, `RunnableMap`, `langchain_core.runnables`, `RunnableMap`],
|
||||
[`langchain.schema.runnable`, `RunnableParallel`, `langchain_core.runnables`, `RunnableParallel`],
|
||||
[`langchain.schema.runnable`, `RunnablePassthrough`, `langchain_core.runnables`, `RunnablePassthrough`],
|
||||
[`langchain.schema.runnable`, `RunnableSequence`, `langchain_core.runnables`, `RunnableSequence`],
|
||||
[`langchain.schema.runnable`, `RunnableWithFallbacks`, `langchain_core.runnables`, `RunnableWithFallbacks`],
|
||||
[`langchain.schema.runnable.base`, `Runnable`, `langchain_core.runnables`, `Runnable`],
|
||||
[`langchain.schema.runnable.base`, `RunnableSerializable`, `langchain_core.runnables`, `RunnableSerializable`],
|
||||
[`langchain.schema.runnable.base`, `RunnableSequence`, `langchain_core.runnables`, `RunnableSequence`],
|
||||
[`langchain.schema.runnable.base`, `RunnableParallel`, `langchain_core.runnables`, `RunnableParallel`],
|
||||
[`langchain.schema.runnable.base`, `RunnableGenerator`, `langchain_core.runnables`, `RunnableGenerator`],
|
||||
[`langchain.schema.runnable.base`, `RunnableLambda`, `langchain_core.runnables`, `RunnableLambda`],
|
||||
[`langchain.schema.runnable.base`, `RunnableEachBase`, `langchain_core.runnables.base`, `RunnableEachBase`],
|
||||
[`langchain.schema.runnable.base`, `RunnableEach`, `langchain_core.runnables.base`, `RunnableEach`],
|
||||
[`langchain.schema.runnable.base`, `RunnableBindingBase`, `langchain_core.runnables.base`, `RunnableBindingBase`],
|
||||
[`langchain.schema.runnable.base`, `RunnableBinding`, `langchain_core.runnables`, `RunnableBinding`],
|
||||
[`langchain.schema.runnable.base`, `RunnableMap`, `langchain_core.runnables`, `RunnableMap`],
|
||||
[`langchain.schema.runnable.base`, `coerce_to_runnable`, `langchain_core.runnables.base`, `coerce_to_runnable`],
|
||||
[`langchain.schema.runnable.branch`, `RunnableBranch`, `langchain_core.runnables`, `RunnableBranch`],
|
||||
[`langchain.schema.runnable.config`, `EmptyDict`, `langchain_core.runnables.config`, `EmptyDict`],
|
||||
[`langchain.schema.runnable.config`, `RunnableConfig`, `langchain_core.runnables`, `RunnableConfig`],
|
||||
[`langchain.schema.runnable.config`, `ensure_config`, `langchain_core.runnables`, `ensure_config`],
|
||||
[`langchain.schema.runnable.config`, `get_config_list`, `langchain_core.runnables`, `get_config_list`],
|
||||
[`langchain.schema.runnable.config`, `patch_config`, `langchain_core.runnables`, `patch_config`],
|
||||
[`langchain.schema.runnable.config`, `merge_configs`, `langchain_core.runnables.config`, `merge_configs`],
|
||||
[`langchain.schema.runnable.config`, `acall_func_with_variable_args`, `langchain_core.runnables.config`, `acall_func_with_variable_args`],
|
||||
[`langchain.schema.runnable.config`, `call_func_with_variable_args`, `langchain_core.runnables.config`, `call_func_with_variable_args`],
|
||||
[`langchain.schema.runnable.config`, `get_callback_manager_for_config`, `langchain_core.runnables.config`, `get_callback_manager_for_config`],
|
||||
[`langchain.schema.runnable.config`, `get_async_callback_manager_for_config`, `langchain_core.runnables.config`, `get_async_callback_manager_for_config`],
|
||||
[`langchain.schema.runnable.config`, `get_executor_for_config`, `langchain_core.runnables.config`, `get_executor_for_config`],
|
||||
[`langchain.schema.runnable.configurable`, `DynamicRunnable`, `langchain_core.runnables.configurable`, `DynamicRunnable`],
|
||||
[`langchain.schema.runnable.configurable`, `RunnableConfigurableFields`, `langchain_core.runnables.configurable`, `RunnableConfigurableFields`],
|
||||
[`langchain.schema.runnable.configurable`, `StrEnum`, `langchain_core.runnables.configurable`, `StrEnum`],
|
||||
[`langchain.schema.runnable.configurable`, `RunnableConfigurableAlternatives`, `langchain_core.runnables.configurable`, `RunnableConfigurableAlternatives`],
|
||||
[`langchain.schema.runnable.configurable`, `make_options_spec`, `langchain_core.runnables.configurable`, `make_options_spec`],
|
||||
[`langchain.schema.runnable.fallbacks`, `RunnableWithFallbacks`, `langchain_core.runnables`, `RunnableWithFallbacks`],
|
||||
[`langchain.schema.runnable.history`, `RunnableWithMessageHistory`, `langchain_core.runnables.history`, `RunnableWithMessageHistory`],
|
||||
[`langchain.schema.runnable.passthrough`, `aidentity`, `langchain_core.runnables.passthrough`, `aidentity`],
|
||||
[`langchain.schema.runnable.passthrough`, `identity`, `langchain_core.runnables.passthrough`, `identity`],
|
||||
[`langchain.schema.runnable.passthrough`, `RunnablePassthrough`, `langchain_core.runnables`, `RunnablePassthrough`],
|
||||
[`langchain.schema.runnable.passthrough`, `RunnableAssign`, `langchain_core.runnables`, `RunnableAssign`],
|
||||
[`langchain.schema.runnable.retry`, `RunnableRetry`, `langchain_core.runnables.retry`, `RunnableRetry`],
|
||||
[`langchain.schema.runnable.router`, `RouterInput`, `langchain_core.runnables`, `RouterInput`],
|
||||
[`langchain.schema.runnable.router`, `RouterRunnable`, `langchain_core.runnables`, `RouterRunnable`],
|
||||
[`langchain.schema.runnable.utils`, `accepts_run_manager`, `langchain_core.runnables.utils`, `accepts_run_manager`],
|
||||
[`langchain.schema.runnable.utils`, `accepts_config`, `langchain_core.runnables.utils`, `accepts_config`],
|
||||
[`langchain.schema.runnable.utils`, `IsLocalDict`, `langchain_core.runnables.utils`, `IsLocalDict`],
|
||||
[`langchain.schema.runnable.utils`, `IsFunctionArgDict`, `langchain_core.runnables.utils`, `IsFunctionArgDict`],
|
||||
[`langchain.schema.runnable.utils`, `GetLambdaSource`, `langchain_core.runnables.utils`, `GetLambdaSource`],
|
||||
[`langchain.schema.runnable.utils`, `get_function_first_arg_dict_keys`, `langchain_core.runnables.utils`, `get_function_first_arg_dict_keys`],
|
||||
[`langchain.schema.runnable.utils`, `get_lambda_source`, `langchain_core.runnables.utils`, `get_lambda_source`],
|
||||
[`langchain.schema.runnable.utils`, `indent_lines_after_first`, `langchain_core.runnables.utils`, `indent_lines_after_first`],
|
||||
[`langchain.schema.runnable.utils`, `AddableDict`, `langchain_core.runnables`, `AddableDict`],
|
||||
[`langchain.schema.runnable.utils`, `SupportsAdd`, `langchain_core.runnables.utils`, `SupportsAdd`],
|
||||
[`langchain.schema.runnable.utils`, `add`, `langchain_core.runnables`, `add`],
|
||||
[`langchain.schema.runnable.utils`, `ConfigurableField`, `langchain_core.runnables`, `ConfigurableField`],
|
||||
[`langchain.schema.runnable.utils`, `ConfigurableFieldSingleOption`, `langchain_core.runnables`, `ConfigurableFieldSingleOption`],
|
||||
[`langchain.schema.runnable.utils`, `ConfigurableFieldMultiOption`, `langchain_core.runnables`, `ConfigurableFieldMultiOption`],
|
||||
[`langchain.schema.runnable.utils`, `ConfigurableFieldSpec`, `langchain_core.runnables`, `ConfigurableFieldSpec`],
|
||||
[`langchain.schema.runnable.utils`, `get_unique_config_specs`, `langchain_core.runnables.utils`, `get_unique_config_specs`],
|
||||
[`langchain.schema.runnable.utils`, `aadd`, `langchain_core.runnables`, `aadd`],
|
||||
[`langchain.schema.runnable.utils`, `gated_coro`, `langchain_core.runnables.utils`, `gated_coro`],
|
||||
[`langchain.schema.runnable.utils`, `gather_with_concurrency`, `langchain_core.runnables.utils`, `gather_with_concurrency`],
|
||||
[`langchain.schema.storage`, `BaseStore`, `langchain_core.stores`, `BaseStore`],
|
||||
[`langchain.schema.vectorstore`, `VectorStore`, `langchain_core.vectorstores`, `VectorStore`],
|
||||
[`langchain.schema.vectorstore`, `VectorStoreRetriever`, `langchain_core.vectorstores`, `VectorStoreRetriever`],
|
||||
[`langchain.tools`, `BaseTool`, `langchain_core.tools`, `BaseTool`],
|
||||
[`langchain.tools`, `StructuredTool`, `langchain_core.tools`, `StructuredTool`],
|
||||
[`langchain.tools`, `Tool`, `langchain_core.tools`, `Tool`],
|
||||
[`langchain.tools`, `format_tool_to_openai_function`, `langchain_core.utils.function_calling`, `format_tool_to_openai_function`],
|
||||
[`langchain.tools`, `tool`, `langchain_core.tools`, `tool`],
|
||||
[`langchain.tools.base`, `SchemaAnnotationError`, `langchain_core.tools`, `SchemaAnnotationError`],
|
||||
[`langchain.tools.base`, `create_schema_from_function`, `langchain_core.tools`, `create_schema_from_function`],
|
||||
[`langchain.tools.base`, `ToolException`, `langchain_core.tools`, `ToolException`],
|
||||
[`langchain.tools.base`, `BaseTool`, `langchain_core.tools`, `BaseTool`],
|
||||
[`langchain.tools.base`, `Tool`, `langchain_core.tools`, `Tool`],
|
||||
[`langchain.tools.base`, `StructuredTool`, `langchain_core.tools`, `StructuredTool`],
|
||||
[`langchain.tools.base`, `tool`, `langchain_core.tools`, `tool`],
|
||||
[`langchain.tools.convert_to_openai`, `format_tool_to_openai_function`, `langchain_core.utils.function_calling`, `format_tool_to_openai_function`],
|
||||
[`langchain.tools.render`, `format_tool_to_openai_tool`, `langchain_core.utils.function_calling`, `format_tool_to_openai_tool`],
|
||||
[`langchain.tools.render`, `format_tool_to_openai_function`, `langchain_core.utils.function_calling`, `format_tool_to_openai_function`],
|
||||
[`langchain.utilities.loading`, `try_load_from_hub`, `langchain_core.utils`, `try_load_from_hub`],
|
||||
[`langchain.utils`, `StrictFormatter`, `langchain_core.utils`, `StrictFormatter`],
|
||||
[`langchain.utils`, `check_package_version`, `langchain_core.utils`, `check_package_version`],
|
||||
[`langchain.utils`, `comma_list`, `langchain_core.utils`, `comma_list`],
|
||||
[`langchain.utils`, `convert_to_secret_str`, `langchain_core.utils`, `convert_to_secret_str`],
|
||||
[`langchain.utils`, `get_bolded_text`, `langchain_core.utils`, `get_bolded_text`],
|
||||
[`langchain.utils`, `get_color_mapping`, `langchain_core.utils`, `get_color_mapping`],
|
||||
[`langchain.utils`, `get_colored_text`, `langchain_core.utils`, `get_colored_text`],
|
||||
[`langchain.utils`, `get_from_dict_or_env`, `langchain_core.utils`, `get_from_dict_or_env`],
|
||||
[`langchain.utils`, `get_from_env`, `langchain_core.utils`, `get_from_env`],
|
||||
[`langchain.utils`, `get_pydantic_field_names`, `langchain_core.utils`, `get_pydantic_field_names`],
|
||||
[`langchain.utils`, `guard_import`, `langchain_core.utils`, `guard_import`],
|
||||
[`langchain.utils`, `mock_now`, `langchain_core.utils`, `mock_now`],
|
||||
[`langchain.utils`, `print_text`, `langchain_core.utils`, `print_text`],
|
||||
[`langchain.utils`, `raise_for_status_with_text`, `langchain_core.utils`, `raise_for_status_with_text`],
|
||||
[`langchain.utils`, `stringify_dict`, `langchain_core.utils`, `stringify_dict`],
|
||||
[`langchain.utils`, `stringify_value`, `langchain_core.utils`, `stringify_value`],
|
||||
[`langchain.utils`, `xor_args`, `langchain_core.utils`, `xor_args`],
|
||||
[`langchain.utils.aiter`, `py_anext`, `langchain_core.utils.aiter`, `py_anext`],
|
||||
[`langchain.utils.aiter`, `NoLock`, `langchain_core.utils.aiter`, `NoLock`],
|
||||
[`langchain.utils.aiter`, `Tee`, `langchain_core.utils.aiter`, `Tee`],
|
||||
[`langchain.utils.env`, `get_from_dict_or_env`, `langchain_core.utils`, `get_from_dict_or_env`],
|
||||
[`langchain.utils.env`, `get_from_env`, `langchain_core.utils`, `get_from_env`],
|
||||
[`langchain.utils.formatting`, `StrictFormatter`, `langchain_core.utils`, `StrictFormatter`],
|
||||
[`langchain.utils.html`, `find_all_links`, `langchain_core.utils.html`, `find_all_links`],
|
||||
[`langchain.utils.html`, `extract_sub_links`, `langchain_core.utils.html`, `extract_sub_links`],
|
||||
[`langchain.utils.input`, `get_color_mapping`, `langchain_core.utils`, `get_color_mapping`],
|
||||
[`langchain.utils.input`, `get_colored_text`, `langchain_core.utils`, `get_colored_text`],
|
||||
[`langchain.utils.input`, `get_bolded_text`, `langchain_core.utils`, `get_bolded_text`],
|
||||
[`langchain.utils.input`, `print_text`, `langchain_core.utils`, `print_text`],
|
||||
[`langchain.utils.iter`, `NoLock`, `langchain_core.utils.iter`, `NoLock`],
|
||||
[`langchain.utils.iter`, `tee_peer`, `langchain_core.utils.iter`, `tee_peer`],
|
||||
[`langchain.utils.iter`, `Tee`, `langchain_core.utils.iter`, `Tee`],
|
||||
[`langchain.utils.iter`, `batch_iterate`, `langchain_core.utils.iter`, `batch_iterate`],
|
||||
[`langchain.utils.json_schema`, `_retrieve_ref`, `langchain_core.utils.json_schema`, `_retrieve_ref`],
|
||||
[`langchain.utils.json_schema`, `_dereference_refs_helper`, `langchain_core.utils.json_schema`, `_dereference_refs_helper`],
|
||||
[`langchain.utils.json_schema`, `_infer_skip_keys`, `langchain_core.utils.json_schema`, `_infer_skip_keys`],
|
||||
[`langchain.utils.json_schema`, `dereference_refs`, `langchain_core.utils.json_schema`, `dereference_refs`],
|
||||
[`langchain.utils.loading`, `try_load_from_hub`, `langchain_core.utils`, `try_load_from_hub`],
|
||||
[`langchain.utils.openai_functions`, `FunctionDescription`, `langchain_core.utils.function_calling`, `FunctionDescription`],
|
||||
[`langchain.utils.openai_functions`, `ToolDescription`, `langchain_core.utils.function_calling`, `ToolDescription`],
|
||||
[`langchain.utils.openai_functions`, `convert_pydantic_to_openai_function`, `langchain_core.utils.function_calling`, `convert_pydantic_to_openai_function`],
|
||||
[`langchain.utils.openai_functions`, `convert_pydantic_to_openai_tool`, `langchain_core.utils.function_calling`, `convert_pydantic_to_openai_tool`],
|
||||
[`langchain.utils.pydantic`, `get_pydantic_major_version`, `langchain_core.utils.pydantic`, `get_pydantic_major_version`],
|
||||
[`langchain.utils.strings`, `stringify_value`, `langchain_core.utils`, `stringify_value`],
|
||||
[`langchain.utils.strings`, `stringify_dict`, `langchain_core.utils`, `stringify_dict`],
|
||||
[`langchain.utils.strings`, `comma_list`, `langchain_core.utils`, `comma_list`],
|
||||
[`langchain.utils.utils`, `xor_args`, `langchain_core.utils`, `xor_args`],
|
||||
[`langchain.utils.utils`, `raise_for_status_with_text`, `langchain_core.utils`, `raise_for_status_with_text`],
|
||||
[`langchain.utils.utils`, `mock_now`, `langchain_core.utils`, `mock_now`],
|
||||
[`langchain.utils.utils`, `guard_import`, `langchain_core.utils`, `guard_import`],
|
||||
[`langchain.utils.utils`, `check_package_version`, `langchain_core.utils`, `check_package_version`],
|
||||
[`langchain.utils.utils`, `get_pydantic_field_names`, `langchain_core.utils`, `get_pydantic_field_names`],
|
||||
[`langchain.utils.utils`, `build_extra_kwargs`, `langchain_core.utils`, `build_extra_kwargs`],
|
||||
[`langchain.utils.utils`, `convert_to_secret_str`, `langchain_core.utils`, `convert_to_secret_str`],
|
||||
[`langchain.vectorstores`, `VectorStore`, `langchain_core.vectorstores`, `VectorStore`],
[`langchain.vectorstores.base`, `VectorStore`, `langchain_core.vectorstores`, `VectorStore`],
[`langchain.vectorstores.base`, `VectorStoreRetriever`, `langchain_core.vectorstores`, `VectorStoreRetriever`],
[`langchain.vectorstores.singlestoredb`, `SingleStoreDBRetriever`, `langchain_core.vectorstores`, `VectorStoreRetriever`]
])
}

// Add this for invoking directly
langchain_migrate_langchain_to_core()
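For illustration only (this text is not part of the generated pattern): given the mapping entries above, the langchain_to_core migration rewrites imports in user code roughly as follows.

# before (deprecated langchain.schema paths)
from langchain.schema.messages import AIMessage
from langchain.schema.runnable import RunnablePassthrough
# after running the langchain -> core migration
from langchain_core.messages import AIMessage
from langchain_core.runnables import RunnablePassthrough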
File diff suppressed because it is too large
@@ -1,31 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_langchain_to_textsplitters() {
find_replace_imports(list=[
[`langchain.text_splitter`, `TokenTextSplitter`, `langchain_text_splitters`, `TokenTextSplitter`],
[`langchain.text_splitter`, `TextSplitter`, `langchain_text_splitters`, `TextSplitter`],
[`langchain.text_splitter`, `Tokenizer`, `langchain_text_splitters`, `Tokenizer`],
[`langchain.text_splitter`, `Language`, `langchain_text_splitters`, `Language`],
[`langchain.text_splitter`, `RecursiveCharacterTextSplitter`, `langchain_text_splitters`, `RecursiveCharacterTextSplitter`],
[`langchain.text_splitter`, `RecursiveJsonSplitter`, `langchain_text_splitters`, `RecursiveJsonSplitter`],
[`langchain.text_splitter`, `LatexTextSplitter`, `langchain_text_splitters`, `LatexTextSplitter`],
[`langchain.text_splitter`, `PythonCodeTextSplitter`, `langchain_text_splitters`, `PythonCodeTextSplitter`],
[`langchain.text_splitter`, `KonlpyTextSplitter`, `langchain_text_splitters`, `KonlpyTextSplitter`],
[`langchain.text_splitter`, `SpacyTextSplitter`, `langchain_text_splitters`, `SpacyTextSplitter`],
[`langchain.text_splitter`, `NLTKTextSplitter`, `langchain_text_splitters`, `NLTKTextSplitter`],
[`langchain.text_splitter`, `split_text_on_tokens`, `langchain_text_splitters`, `split_text_on_tokens`],
[`langchain.text_splitter`, `SentenceTransformersTokenTextSplitter`, `langchain_text_splitters`, `SentenceTransformersTokenTextSplitter`],
[`langchain.text_splitter`, `ElementType`, `langchain_text_splitters`, `ElementType`],
[`langchain.text_splitter`, `HeaderType`, `langchain_text_splitters`, `HeaderType`],
[`langchain.text_splitter`, `LineType`, `langchain_text_splitters`, `LineType`],
[`langchain.text_splitter`, `HTMLHeaderTextSplitter`, `langchain_text_splitters`, `HTMLHeaderTextSplitter`],
[`langchain.text_splitter`, `MarkdownHeaderTextSplitter`, `langchain_text_splitters`, `MarkdownHeaderTextSplitter`],
[`langchain.text_splitter`, `MarkdownTextSplitter`, `langchain_text_splitters`, `MarkdownTextSplitter`],
[`langchain.text_splitter`, `CharacterTextSplitter`, `langchain_text_splitters`, `CharacterTextSplitter`]
])
}

// Add this for invoking directly
langchain_migrate_langchain_to_textsplitters()
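A representative rewrite produced by this pattern (illustrative, not part of the generated file):

# before
from langchain.text_splitter import RecursiveCharacterTextSplitter
# after
from langchain_text_splitters import RecursiveCharacterTextSplitter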
@@ -1,23 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_openai() {
find_replace_imports(list=[
[`langchain_community.embeddings.openai`, `OpenAIEmbeddings`, `langchain_openai`, `OpenAIEmbeddings`],
[`langchain_community.embeddings.azure_openai`, `AzureOpenAIEmbeddings`, `langchain_openai`, `AzureOpenAIEmbeddings`],
[`langchain_community.chat_models.openai`, `ChatOpenAI`, `langchain_openai`, `ChatOpenAI`],
[`langchain_community.chat_models.azure_openai`, `AzureChatOpenAI`, `langchain_openai`, `AzureChatOpenAI`],
[`langchain_community.llms.openai`, `OpenAI`, `langchain_openai`, `OpenAI`],
[`langchain_community.llms.openai`, `AzureOpenAI`, `langchain_openai`, `AzureOpenAI`],
[`langchain_community.embeddings`, `AzureOpenAIEmbeddings`, `langchain_openai`, `AzureOpenAIEmbeddings`],
[`langchain_community.embeddings`, `OpenAIEmbeddings`, `langchain_openai`, `OpenAIEmbeddings`],
[`langchain_community.chat_models`, `AzureChatOpenAI`, `langchain_openai`, `AzureChatOpenAI`],
[`langchain_community.chat_models`, `ChatOpenAI`, `langchain_openai`, `ChatOpenAI`],
[`langchain_community.llms`, `AzureOpenAI`, `langchain_openai`, `AzureOpenAI`],
[`langchain_community.llms`, `OpenAI`, `langchain_openai`, `OpenAI`]
])
}

// Add this for invoking directly
langchain_migrate_openai()
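Likewise for the OpenAI partner package, the pattern above yields rewrites of this shape (illustrative):

# before
from langchain_community.chat_models import ChatOpenAI
from langchain_community.embeddings import OpenAIEmbeddings
# after
from langchain_openai import ChatOpenAI
from langchain_openai import OpenAIEmbeddings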
@@ -1,13 +0,0 @@

language python

// This migration is generated automatically - do not manually edit this file
pattern langchain_migrate_pinecone() {
find_replace_imports(list=[
[`langchain_community.vectorstores.pinecone`, `Pinecone`, `langchain_pinecone`, `Pinecone`],
[`langchain_community.vectorstores`, `Pinecone`, `langchain_pinecone`, `Pinecone`]
])
}

// Add this for invoking directly
langchain_migrate_pinecone()
@@ -1,36 +0,0 @@
language python

// This migration is generated automatically - do not manually edit this file
pattern replace_pydantic_v1_shim() {
    `from $IMPORT import $...` where {
        or {
            and {
                $IMPORT <: or {
                    "langchain_core.pydantic_v1",
                    "langchain.pydantic_v1",
                    "langserve.pydantic_v1",
                },
                $IMPORT => `pydantic`
            },
            and {
                $IMPORT <: or {
                    "langchain_core.pydantic_v1.data_classes",
                    "langchain.pydantic_v1.data_classes",
                    "langserve.pydantic_v1.data_classes",
                },
                $IMPORT => `pydantic.data_classes`
            },
            and {
                $IMPORT <: or {
                    "langchain_core.pydantic_v1.main",
                    "langchain.pydantic_v1.main",
                    "langserve.pydantic_v1.main",
                },
                $IMPORT => `pydantic.main`
            },
        }
    }
}

// Add this for invoking directly
replace_pydantic_v1_shim()
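Unlike the generated list-based patterns, this shim replacement rewrites only the module path and leaves the imported names untouched. A hedged sketch of the effect (BaseModel and Field are example names, not taken from this file):

# before
from langchain_core.pydantic_v1 import BaseModel, Field
# after
from pydantic import BaseModel, Field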
@@ -0,0 +1,45 @@
from enum import Enum
from typing import List, Type

from libcst.codemod import ContextAwareTransformer
from libcst.codemod.visitors import AddImportsVisitor, RemoveImportsVisitor

from langchain_cli.namespaces.migrate.codemods.replace_imports import (
    generate_import_replacer,
)


class Rule(str, Enum):
    langchain_to_community = "langchain_to_community"
    """Replace deprecated langchain imports with current ones in community."""
    langchain_to_core = "langchain_to_core"
    """Replace deprecated langchain imports with current ones in core."""
    langchain_to_text_splitters = "langchain_to_text_splitters"
    """Replace deprecated langchain imports with current ones in text splitters."""
    community_to_core = "community_to_core"
    """Replace deprecated community imports with current ones in core."""
    community_to_partner = "community_to_partner"
    """Replace deprecated community imports with current ones in partner."""


def gather_codemods(disabled: List[Rule]) -> List[Type[ContextAwareTransformer]]:
    """Gather codemods based on the disabled rules."""
    codemods: List[Type[ContextAwareTransformer]] = []

    # Import rules
    import_rules = {
        Rule.langchain_to_community,
        Rule.langchain_to_core,
        Rule.community_to_core,
        Rule.community_to_partner,
        Rule.langchain_to_text_splitters,
    }

    # Find active import rules
    active_import_rules = import_rules - set(disabled)

    if active_import_rules:
        codemods.append(generate_import_replacer(active_import_rules))
    # Those codemods need to be the last ones.
    codemods.extend([RemoveImportsVisitor, AddImportsVisitor])
    return codemods
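A minimal usage sketch of the registry above (assuming the module is importable from the langchain-cli package; its exact path is not shown in this diff):

# Gather every codemod except the community -> partner rewrites.
codemods = gather_codemods(disabled=[Rule.community_to_partner])
# The result is the generated import replacer followed by
# RemoveImportsVisitor and AddImportsVisitor, which must run last.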
@@ -0,0 +1,18 @@
[
  [
    "langchain_community.llms.anthropic.Anthropic",
    "langchain_anthropic.Anthropic"
  ],
  [
    "langchain_community.chat_models.anthropic.ChatAnthropic",
    "langchain_anthropic.ChatAnthropic"
  ],
  [
    "langchain_community.llms.Anthropic",
    "langchain_anthropic.Anthropic"
  ],
  [
    "langchain_community.chat_models.ChatAnthropic",
    "langchain_anthropic.ChatAnthropic"
  ]
]
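Each pair in these JSON files maps a fully qualified old symbol to its new home; at migration time that corresponds to an import rewrite such as (illustrative):

# before
from langchain_community.chat_models import ChatAnthropic
# after
from langchain_anthropic import ChatAnthropic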
@@ -0,0 +1,30 @@
[
  [
    "langchain_community.vectorstores.astradb.AstraDB",
    "langchain_astradb.AstraDBVectorStore"
  ],
  [
    "langchain_community.storage.astradb.AstraDBByteStore",
    "langchain_astradb.AstraDBByteStore"
  ],
  [
    "langchain_community.storage.astradb.AstraDBStore",
    "langchain_astradb.AstraDBStore"
  ],
  [
    "langchain_community.cache.AstraDBCache",
    "langchain_astradb.AstraDBCache"
  ],
  [
    "langchain_community.cache.AstraDBSemanticCache",
    "langchain_astradb.AstraDBSemanticCache"
  ],
  [
    "langchain_community.chat_message_histories.astradb.AstraDBChatMessageHistory",
    "langchain_astradb.AstraDBChatMessageHistory"
  ],
  [
    "langchain_community.document_loaders.astradb.AstraDBLoader",
    "langchain_astradb.AstraDBLoader"
  ]
]
@@ -51,17 +51,26 @@
    "langchain_community.document_loaders.blob_loaders.schema.BlobLoader",
    "langchain_core.document_loaders.BlobLoader"
  ],
  ["langchain_community.tools.BaseTool", "langchain_core.tools.BaseTool"],
  [
    "langchain_community.tools.BaseTool",
    "langchain_core.tools.BaseTool"
  ],
  [
    "langchain_community.tools.StructuredTool",
    "langchain_core.tools.StructuredTool"
  ],
  ["langchain_community.tools.Tool", "langchain_core.tools.Tool"],
  [
    "langchain_community.tools.Tool",
    "langchain_core.tools.Tool"
  ],
  [
    "langchain_community.tools.format_tool_to_openai_function",
    "langchain_core.utils.function_calling.format_tool_to_openai_function"
  ],
  ["langchain_community.tools.tool", "langchain_core.tools.tool"],
  [
    "langchain_community.tools.tool",
    "langchain_core.tools.tool"
  ],
  [
    "langchain_community.tools.convert_to_openai.format_tool_to_openai_function",
    "langchain_core.utils.function_calling.format_tool_to_openai_function"
@@ -98,4 +107,4 @@
    "langchain_community.vectorstores.VectorStore",
    "langchain_core.vectorstores.VectorStore"
  ]
]
]
@@ -0,0 +1,18 @@
[
  [
    "langchain_community.llms.fireworks.Fireworks",
    "langchain_fireworks.Fireworks"
  ],
  [
    "langchain_community.chat_models.fireworks.ChatFireworks",
    "langchain_fireworks.ChatFireworks"
  ],
  [
    "langchain_community.llms.Fireworks",
    "langchain_fireworks.Fireworks"
  ],
  [
    "langchain_community.chat_models.ChatFireworks",
    "langchain_fireworks.ChatFireworks"
  ]
]
@@ -0,0 +1,10 @@
[
  [
    "langchain_community.llms.watsonxllm.WatsonxLLM",
    "langchain_ibm.WatsonxLLM"
  ],
  [
    "langchain_community.llms.WatsonxLLM",
    "langchain_ibm.WatsonxLLM"
  ]
]
File diff suppressed because it is too large
@@ -1,5 +1,8 @@
|
||||
[
|
||||
["langchain._api.deprecated", "langchain_core._api.deprecated"],
|
||||
[
|
||||
"langchain._api.deprecated",
|
||||
"langchain_core._api.deprecated"
|
||||
],
|
||||
[
|
||||
"langchain._api.LangChainDeprecationWarning",
|
||||
"langchain_core._api.LangChainDeprecationWarning"
|
||||
@@ -12,7 +15,10 @@
|
||||
"langchain._api.surface_langchain_deprecation_warnings",
|
||||
"langchain_core._api.surface_langchain_deprecation_warnings"
|
||||
],
|
||||
["langchain._api.warn_deprecated", "langchain_core._api.warn_deprecated"],
|
||||
[
|
||||
"langchain._api.warn_deprecated",
|
||||
"langchain_core._api.warn_deprecated"
|
||||
],
|
||||
[
|
||||
"langchain._api.deprecation.LangChainDeprecationWarning",
|
||||
"langchain_core._api.LangChainDeprecationWarning"
|
||||
@@ -21,7 +27,10 @@
|
||||
"langchain._api.deprecation.LangChainPendingDeprecationWarning",
|
||||
"langchain_core._api.deprecation.LangChainPendingDeprecationWarning"
|
||||
],
|
||||
["langchain._api.deprecation.deprecated", "langchain_core._api.deprecated"],
|
||||
[
|
||||
"langchain._api.deprecation.deprecated",
|
||||
"langchain_core._api.deprecated"
|
||||
],
|
||||
[
|
||||
"langchain._api.deprecation.suppress_langchain_deprecation_warning",
|
||||
"langchain_core._api.suppress_langchain_deprecation_warning"
|
||||
@@ -38,12 +47,30 @@
|
||||
"langchain._api.path.get_relative_path",
|
||||
"langchain_core._api.get_relative_path"
|
||||
],
|
||||
["langchain._api.path.as_import_path", "langchain_core._api.as_import_path"],
|
||||
["langchain.agents.Tool", "langchain_core.tools.Tool"],
|
||||
["langchain.agents.tool", "langchain_core.tools.tool"],
|
||||
["langchain.agents.tools.BaseTool", "langchain_core.tools.BaseTool"],
|
||||
["langchain.agents.tools.tool", "langchain_core.tools.tool"],
|
||||
["langchain.agents.tools.Tool", "langchain_core.tools.Tool"],
|
||||
[
|
||||
"langchain._api.path.as_import_path",
|
||||
"langchain_core._api.as_import_path"
|
||||
],
|
||||
[
|
||||
"langchain.agents.Tool",
|
||||
"langchain_core.tools.Tool"
|
||||
],
|
||||
[
|
||||
"langchain.agents.tool",
|
||||
"langchain_core.tools.tool"
|
||||
],
|
||||
[
|
||||
"langchain.agents.tools.BaseTool",
|
||||
"langchain_core.tools.BaseTool"
|
||||
],
|
||||
[
|
||||
"langchain.agents.tools.tool",
|
||||
"langchain_core.tools.tool"
|
||||
],
|
||||
[
|
||||
"langchain.agents.tools.Tool",
|
||||
"langchain_core.tools.Tool"
|
||||
],
|
||||
[
|
||||
"langchain.base_language.BaseLanguageModel",
|
||||
"langchain_core.language_models.BaseLanguageModel"
|
||||
@@ -300,7 +327,10 @@
|
||||
"langchain.callbacks.tracers.schemas.LLMRun",
|
||||
"langchain_core.tracers.schemas.LLMRun"
|
||||
],
|
||||
["langchain.callbacks.tracers.schemas.Run", "langchain_core.tracers.Run"],
|
||||
[
|
||||
"langchain.callbacks.tracers.schemas.Run",
|
||||
"langchain_core.tracers.Run"
|
||||
],
|
||||
[
|
||||
"langchain.callbacks.tracers.schemas.RunTypeEnum",
|
||||
"langchain_core.tracers.schemas.RunTypeEnum"
|
||||
@@ -361,8 +391,14 @@
|
||||
"langchain.chat_models.base.agenerate_from_stream",
|
||||
"langchain_core.language_models.chat_models.agenerate_from_stream"
|
||||
],
|
||||
["langchain.docstore.document.Document", "langchain_core.documents.Document"],
|
||||
["langchain.document_loaders.Blob", "langchain_core.document_loaders.Blob"],
|
||||
[
|
||||
"langchain.docstore.document.Document",
|
||||
"langchain_core.documents.Document"
|
||||
],
|
||||
[
|
||||
"langchain.document_loaders.Blob",
|
||||
"langchain_core.document_loaders.Blob"
|
||||
],
|
||||
[
|
||||
"langchain.document_loaders.BlobLoader",
|
||||
"langchain_core.document_loaders.BlobLoader"
|
||||
@@ -399,29 +435,74 @@
|
||||
"langchain.formatting.StrictFormatter",
|
||||
"langchain_core.utils.StrictFormatter"
|
||||
],
|
||||
["langchain.input.get_bolded_text", "langchain_core.utils.get_bolded_text"],
|
||||
[
|
||||
"langchain.input.get_bolded_text",
|
||||
"langchain_core.utils.get_bolded_text"
|
||||
],
|
||||
[
|
||||
"langchain.input.get_color_mapping",
|
||||
"langchain_core.utils.get_color_mapping"
|
||||
],
|
||||
["langchain.input.get_colored_text", "langchain_core.utils.get_colored_text"],
|
||||
["langchain.input.print_text", "langchain_core.utils.print_text"],
|
||||
[
|
||||
"langchain.input.get_colored_text",
|
||||
"langchain_core.utils.get_colored_text"
|
||||
],
|
||||
[
|
||||
"langchain.input.print_text",
|
||||
"langchain_core.utils.print_text"
|
||||
],
|
||||
[
|
||||
"langchain.llms.base.BaseLanguageModel",
|
||||
"langchain_core.language_models.BaseLanguageModel"
|
||||
],
|
||||
["langchain.llms.base.BaseLLM", "langchain_core.language_models.BaseLLM"],
|
||||
["langchain.llms.base.LLM", "langchain_core.language_models.LLM"],
|
||||
["langchain.load.dumpd", "langchain_core.load.dumpd"],
|
||||
["langchain.load.dumps", "langchain_core.load.dumps"],
|
||||
["langchain.load.load", "langchain_core.load.load"],
|
||||
["langchain.load.loads", "langchain_core.load.loads"],
|
||||
["langchain.load.dump.default", "langchain_core.load.dump.default"],
|
||||
["langchain.load.dump.dumps", "langchain_core.load.dumps"],
|
||||
["langchain.load.dump.dumpd", "langchain_core.load.dumpd"],
|
||||
["langchain.load.load.Reviver", "langchain_core.load.load.Reviver"],
|
||||
["langchain.load.load.loads", "langchain_core.load.loads"],
|
||||
["langchain.load.load.load", "langchain_core.load.load"],
|
||||
[
|
||||
"langchain.llms.base.BaseLLM",
|
||||
"langchain_core.language_models.BaseLLM"
|
||||
],
|
||||
[
|
||||
"langchain.llms.base.LLM",
|
||||
"langchain_core.language_models.LLM"
|
||||
],
|
||||
[
|
||||
"langchain.load.dumpd",
|
||||
"langchain_core.load.dumpd"
|
||||
],
|
||||
[
|
||||
"langchain.load.dumps",
|
||||
"langchain_core.load.dumps"
|
||||
],
|
||||
[
|
||||
"langchain.load.load",
|
||||
"langchain_core.load.load"
|
||||
],
|
||||
[
|
||||
"langchain.load.loads",
|
||||
"langchain_core.load.loads"
|
||||
],
|
||||
[
|
||||
"langchain.load.dump.default",
|
||||
"langchain_core.load.dump.default"
|
||||
],
|
||||
[
|
||||
"langchain.load.dump.dumps",
|
||||
"langchain_core.load.dumps"
|
||||
],
|
||||
[
|
||||
"langchain.load.dump.dumpd",
|
||||
"langchain_core.load.dumpd"
|
||||
],
|
||||
[
|
||||
"langchain.load.load.Reviver",
|
||||
"langchain_core.load.load.Reviver"
|
||||
],
|
||||
[
|
||||
"langchain.load.load.loads",
|
||||
"langchain_core.load.loads"
|
||||
],
|
||||
[
|
||||
"langchain.load.load.load",
|
||||
"langchain_core.load.load"
|
||||
],
|
||||
[
|
||||
"langchain.load.serializable.BaseSerialized",
|
||||
"langchain_core.load.serializable.BaseSerialized"
|
||||
@@ -602,7 +683,10 @@
|
||||
"langchain.prompts.PipelinePromptTemplate",
|
||||
"langchain_core.prompts.PipelinePromptTemplate"
|
||||
],
|
||||
["langchain.prompts.PromptTemplate", "langchain_core.prompts.PromptTemplate"],
|
||||
[
|
||||
"langchain.prompts.PromptTemplate",
|
||||
"langchain_core.prompts.PromptTemplate"
|
||||
],
|
||||
[
|
||||
"langchain.prompts.SemanticSimilarityExampleSelector",
|
||||
"langchain_core.example_selectors.SemanticSimilarityExampleSelector"
|
||||
@@ -615,12 +699,18 @@
|
||||
"langchain.prompts.SystemMessagePromptTemplate",
|
||||
"langchain_core.prompts.SystemMessagePromptTemplate"
|
||||
],
|
||||
["langchain.prompts.load_prompt", "langchain_core.prompts.load_prompt"],
|
||||
[
|
||||
"langchain.prompts.load_prompt",
|
||||
"langchain_core.prompts.load_prompt"
|
||||
],
|
||||
[
|
||||
"langchain.prompts.FewShotChatMessagePromptTemplate",
|
||||
"langchain_core.prompts.FewShotChatMessagePromptTemplate"
|
||||
],
|
||||
["langchain.prompts.Prompt", "langchain_core.prompts.PromptTemplate"],
|
||||
[
|
||||
"langchain.prompts.Prompt",
|
||||
"langchain_core.prompts.PromptTemplate"
|
||||
],
|
||||
[
|
||||
"langchain.prompts.base.jinja2_formatter",
|
||||
"langchain_core.prompts.jinja2_formatter"
|
||||
@@ -801,13 +891,34 @@
|
||||
"langchain.prompts.prompt.PromptTemplate",
|
||||
"langchain_core.prompts.PromptTemplate"
|
||||
],
|
||||
["langchain.prompts.prompt.Prompt", "langchain_core.prompts.PromptTemplate"],
|
||||
["langchain.schema.BaseCache", "langchain_core.caches.BaseCache"],
|
||||
["langchain.schema.BaseMemory", "langchain_core.memory.BaseMemory"],
|
||||
["langchain.schema.BaseStore", "langchain_core.stores.BaseStore"],
|
||||
["langchain.schema.AgentFinish", "langchain_core.agents.AgentFinish"],
|
||||
["langchain.schema.AgentAction", "langchain_core.agents.AgentAction"],
|
||||
["langchain.schema.Document", "langchain_core.documents.Document"],
|
||||
[
|
||||
"langchain.prompts.prompt.Prompt",
|
||||
"langchain_core.prompts.PromptTemplate"
|
||||
],
|
||||
[
|
||||
"langchain.schema.BaseCache",
|
||||
"langchain_core.caches.BaseCache"
|
||||
],
|
||||
[
|
||||
"langchain.schema.BaseMemory",
|
||||
"langchain_core.memory.BaseMemory"
|
||||
],
|
||||
[
|
||||
"langchain.schema.BaseStore",
|
||||
"langchain_core.stores.BaseStore"
|
||||
],
|
||||
[
|
||||
"langchain.schema.AgentFinish",
|
||||
"langchain_core.agents.AgentFinish"
|
||||
],
|
||||
[
|
||||
"langchain.schema.AgentAction",
|
||||
"langchain_core.agents.AgentAction"
|
||||
],
|
||||
[
|
||||
"langchain.schema.Document",
|
||||
"langchain_core.documents.Document"
|
||||
],
|
||||
[
|
||||
"langchain.schema.BaseChatMessageHistory",
|
||||
"langchain_core.chat_history.BaseChatMessageHistory"
|
||||
@@ -816,15 +927,30 @@
|
||||
"langchain.schema.BaseDocumentTransformer",
|
||||
"langchain_core.documents.BaseDocumentTransformer"
|
||||
],
|
||||
["langchain.schema.BaseMessage", "langchain_core.messages.BaseMessage"],
|
||||
["langchain.schema.ChatMessage", "langchain_core.messages.ChatMessage"],
|
||||
[
|
||||
"langchain.schema.BaseMessage",
|
||||
"langchain_core.messages.BaseMessage"
|
||||
],
|
||||
[
|
||||
"langchain.schema.ChatMessage",
|
||||
"langchain_core.messages.ChatMessage"
|
||||
],
|
||||
[
|
||||
"langchain.schema.FunctionMessage",
|
||||
"langchain_core.messages.FunctionMessage"
|
||||
],
|
||||
["langchain.schema.HumanMessage", "langchain_core.messages.HumanMessage"],
|
||||
["langchain.schema.AIMessage", "langchain_core.messages.AIMessage"],
|
||||
["langchain.schema.SystemMessage", "langchain_core.messages.SystemMessage"],
|
||||
[
|
||||
"langchain.schema.HumanMessage",
|
||||
"langchain_core.messages.HumanMessage"
|
||||
],
|
||||
[
|
||||
"langchain.schema.AIMessage",
|
||||
"langchain_core.messages.AIMessage"
|
||||
],
|
||||
[
|
||||
"langchain.schema.SystemMessage",
|
||||
"langchain_core.messages.SystemMessage"
|
||||
],
|
||||
[
|
||||
"langchain.schema.messages_from_dict",
|
||||
"langchain_core.messages.messages_from_dict"
|
||||
@@ -849,18 +975,42 @@
|
||||
"langchain.schema.get_buffer_string",
|
||||
"langchain_core.messages.get_buffer_string"
|
||||
],
|
||||
["langchain.schema.RunInfo", "langchain_core.outputs.RunInfo"],
|
||||
["langchain.schema.LLMResult", "langchain_core.outputs.LLMResult"],
|
||||
["langchain.schema.ChatResult", "langchain_core.outputs.ChatResult"],
|
||||
["langchain.schema.ChatGeneration", "langchain_core.outputs.ChatGeneration"],
|
||||
["langchain.schema.Generation", "langchain_core.outputs.Generation"],
|
||||
["langchain.schema.PromptValue", "langchain_core.prompt_values.PromptValue"],
|
||||
[
|
||||
"langchain.schema.RunInfo",
|
||||
"langchain_core.outputs.RunInfo"
|
||||
],
|
||||
[
|
||||
"langchain.schema.LLMResult",
|
||||
"langchain_core.outputs.LLMResult"
|
||||
],
|
||||
[
|
||||
"langchain.schema.ChatResult",
|
||||
"langchain_core.outputs.ChatResult"
|
||||
],
|
||||
[
|
||||
"langchain.schema.ChatGeneration",
|
||||
"langchain_core.outputs.ChatGeneration"
|
||||
],
|
||||
[
|
||||
"langchain.schema.Generation",
|
||||
"langchain_core.outputs.Generation"
|
||||
],
|
||||
[
|
||||
"langchain.schema.PromptValue",
|
||||
"langchain_core.prompt_values.PromptValue"
|
||||
],
|
||||
[
|
||||
"langchain.schema.LangChainException",
|
||||
"langchain_core.exceptions.LangChainException"
|
||||
],
|
||||
["langchain.schema.BaseRetriever", "langchain_core.retrievers.BaseRetriever"],
|
||||
["langchain.schema.Memory", "langchain_core.memory.BaseMemory"],
|
||||
[
|
||||
"langchain.schema.BaseRetriever",
|
||||
"langchain_core.retrievers.BaseRetriever"
|
||||
],
|
||||
[
|
||||
"langchain.schema.Memory",
|
||||
"langchain_core.memory.BaseMemory"
|
||||
],
|
||||
[
|
||||
"langchain.schema.OutputParserException",
|
||||
"langchain_core.exceptions.OutputParserException"
|
||||
@@ -885,13 +1035,22 @@
|
||||
"langchain.schema.format_document",
|
||||
"langchain_core.prompts.format_document"
|
||||
],
|
||||
["langchain.schema.agent.AgentAction", "langchain_core.agents.AgentAction"],
|
||||
[
|
||||
"langchain.schema.agent.AgentAction",
|
||||
"langchain_core.agents.AgentAction"
|
||||
],
|
||||
[
|
||||
"langchain.schema.agent.AgentActionMessageLog",
|
||||
"langchain_core.agents.AgentActionMessageLog"
|
||||
],
|
||||
["langchain.schema.agent.AgentFinish", "langchain_core.agents.AgentFinish"],
|
||||
["langchain.schema.cache.BaseCache", "langchain_core.caches.BaseCache"],
|
||||
[
|
||||
"langchain.schema.agent.AgentFinish",
|
||||
"langchain_core.agents.AgentFinish"
|
||||
],
|
||||
[
|
||||
"langchain.schema.cache.BaseCache",
|
||||
"langchain_core.caches.BaseCache"
|
||||
],
|
||||
[
|
||||
"langchain.schema.callbacks.base.RetrieverManagerMixin",
|
||||
"langchain_core.callbacks.RetrieverManagerMixin"
|
||||
@@ -1168,7 +1327,10 @@
|
||||
"langchain.schema.chat_history.BaseChatMessageHistory",
|
||||
"langchain_core.chat_history.BaseChatMessageHistory"
|
||||
],
|
||||
["langchain.schema.document.Document", "langchain_core.documents.Document"],
|
||||
[
|
||||
"langchain.schema.document.Document",
|
||||
"langchain_core.documents.Document"
|
||||
],
|
||||
[
|
||||
"langchain.schema.document.BaseDocumentTransformer",
|
||||
"langchain_core.documents.BaseDocumentTransformer"
|
||||
@@ -1189,7 +1351,10 @@
|
||||
"langchain.schema.language_model._get_token_ids_default_method",
|
||||
"langchain_core.language_models.base._get_token_ids_default_method"
|
||||
],
|
||||
["langchain.schema.memory.BaseMemory", "langchain_core.memory.BaseMemory"],
|
||||
[
|
||||
"langchain.schema.memory.BaseMemory",
|
||||
"langchain_core.memory.BaseMemory"
|
||||
],
|
||||
[
|
||||
"langchain.schema.messages.get_buffer_string",
|
||||
"langchain_core.messages.get_buffer_string"
|
||||
@@ -1214,7 +1379,10 @@
|
||||
"langchain.schema.messages.HumanMessageChunk",
|
||||
"langchain_core.messages.HumanMessageChunk"
|
||||
],
|
||||
["langchain.schema.messages.AIMessage", "langchain_core.messages.AIMessage"],
|
||||
[
|
||||
"langchain.schema.messages.AIMessage",
|
||||
"langchain_core.messages.AIMessage"
|
||||
],
|
||||
[
|
||||
"langchain.schema.messages.AIMessageChunk",
|
||||
"langchain_core.messages.AIMessageChunk"
|
||||
@@ -1271,7 +1439,10 @@
|
||||
"langchain.schema.messages.message_to_dict",
|
||||
"langchain_core.messages.message_to_dict"
|
||||
],
|
||||
["langchain.schema.output.Generation", "langchain_core.outputs.Generation"],
|
||||
[
|
||||
"langchain.schema.output.Generation",
|
||||
"langchain_core.outputs.Generation"
|
||||
],
|
||||
[
|
||||
"langchain.schema.output.GenerationChunk",
|
||||
"langchain_core.outputs.GenerationChunk"
|
||||
@@ -1284,9 +1455,18 @@
|
||||
"langchain.schema.output.ChatGenerationChunk",
|
||||
"langchain_core.outputs.ChatGenerationChunk"
|
||||
],
|
||||
["langchain.schema.output.RunInfo", "langchain_core.outputs.RunInfo"],
|
||||
["langchain.schema.output.ChatResult", "langchain_core.outputs.ChatResult"],
|
||||
["langchain.schema.output.LLMResult", "langchain_core.outputs.LLMResult"],
|
||||
[
|
||||
"langchain.schema.output.RunInfo",
|
||||
"langchain_core.outputs.RunInfo"
|
||||
],
|
||||
[
|
||||
"langchain.schema.output.ChatResult",
|
||||
"langchain_core.outputs.ChatResult"
|
||||
],
|
||||
[
|
||||
"langchain.schema.output.LLMResult",
|
||||
"langchain_core.outputs.LLMResult"
|
||||
],
|
||||
[
|
||||
"langchain.schema.output_parser.BaseLLMOutputParser",
|
||||
"langchain_core.output_parsers.BaseLLMOutputParser"
|
||||
@@ -1359,7 +1539,10 @@
|
||||
"langchain.schema.runnable.RouterRunnable",
|
||||
"langchain_core.runnables.RouterRunnable"
|
||||
],
|
||||
["langchain.schema.runnable.Runnable", "langchain_core.runnables.Runnable"],
|
||||
[
|
||||
"langchain.schema.runnable.Runnable",
|
||||
"langchain_core.runnables.Runnable"
|
||||
],
|
||||
[
|
||||
"langchain.schema.runnable.RunnableSerializable",
|
||||
"langchain_core.runnables.RunnableSerializable"
|
||||
@@ -1596,7 +1779,10 @@
|
||||
"langchain.schema.runnable.utils.SupportsAdd",
|
||||
"langchain_core.runnables.utils.SupportsAdd"
|
||||
],
|
||||
["langchain.schema.runnable.utils.add", "langchain_core.runnables.add"],
|
||||
[
|
||||
"langchain.schema.runnable.utils.add",
|
||||
"langchain_core.runnables.add"
|
||||
],
|
||||
[
|
||||
"langchain.schema.runnable.utils.ConfigurableField",
|
||||
"langchain_core.runnables.ConfigurableField"
|
||||
@@ -1617,7 +1803,10 @@
|
||||
"langchain.schema.runnable.utils.get_unique_config_specs",
|
||||
"langchain_core.runnables.utils.get_unique_config_specs"
|
||||
],
|
||||
["langchain.schema.runnable.utils.aadd", "langchain_core.runnables.aadd"],
|
||||
[
|
||||
"langchain.schema.runnable.utils.aadd",
|
||||
"langchain_core.runnables.aadd"
|
||||
],
|
||||
[
|
||||
"langchain.schema.runnable.utils.gated_coro",
|
||||
"langchain_core.runnables.utils.gated_coro"
|
||||
@@ -1626,7 +1815,10 @@
|
||||
"langchain.schema.runnable.utils.gather_with_concurrency",
|
||||
"langchain_core.runnables.utils.gather_with_concurrency"
|
||||
],
|
||||
["langchain.schema.storage.BaseStore", "langchain_core.stores.BaseStore"],
|
||||
[
|
||||
"langchain.schema.storage.BaseStore",
|
||||
"langchain_core.stores.BaseStore"
|
||||
],
|
||||
[
|
||||
"langchain.schema.vectorstore.VectorStore",
|
||||
"langchain_core.vectorstores.VectorStore"
|
||||
@@ -1635,14 +1827,26 @@
|
||||
"langchain.schema.vectorstore.VectorStoreRetriever",
|
||||
"langchain_core.vectorstores.VectorStoreRetriever"
|
||||
],
|
||||
["langchain.tools.BaseTool", "langchain_core.tools.BaseTool"],
|
||||
["langchain.tools.StructuredTool", "langchain_core.tools.StructuredTool"],
|
||||
["langchain.tools.Tool", "langchain_core.tools.Tool"],
|
||||
[
|
||||
"langchain.tools.BaseTool",
|
||||
"langchain_core.tools.BaseTool"
|
||||
],
|
||||
[
|
||||
"langchain.tools.StructuredTool",
|
||||
"langchain_core.tools.StructuredTool"
|
||||
],
|
||||
[
|
||||
"langchain.tools.Tool",
|
||||
"langchain_core.tools.Tool"
|
||||
],
|
||||
[
|
||||
"langchain.tools.format_tool_to_openai_function",
|
||||
"langchain_core.utils.function_calling.format_tool_to_openai_function"
|
||||
],
|
||||
["langchain.tools.tool", "langchain_core.tools.tool"],
|
||||
[
|
||||
"langchain.tools.tool",
|
||||
"langchain_core.tools.tool"
|
||||
],
|
||||
[
|
||||
"langchain.tools.base.SchemaAnnotationError",
|
||||
"langchain_core.tools.SchemaAnnotationError"
|
||||
@@ -1651,14 +1855,26 @@
|
||||
"langchain.tools.base.create_schema_from_function",
|
||||
"langchain_core.tools.create_schema_from_function"
|
||||
],
|
||||
["langchain.tools.base.ToolException", "langchain_core.tools.ToolException"],
|
||||
["langchain.tools.base.BaseTool", "langchain_core.tools.BaseTool"],
|
||||
["langchain.tools.base.Tool", "langchain_core.tools.Tool"],
|
||||
[
|
||||
"langchain.tools.base.ToolException",
|
||||
"langchain_core.tools.ToolException"
|
||||
],
|
||||
[
|
||||
"langchain.tools.base.BaseTool",
|
||||
"langchain_core.tools.BaseTool"
|
||||
],
|
||||
[
|
||||
"langchain.tools.base.Tool",
|
||||
"langchain_core.tools.Tool"
|
||||
],
|
||||
[
|
||||
"langchain.tools.base.StructuredTool",
|
||||
"langchain_core.tools.StructuredTool"
|
||||
],
|
||||
["langchain.tools.base.tool", "langchain_core.tools.tool"],
|
||||
[
|
||||
"langchain.tools.base.tool",
|
||||
"langchain_core.tools.tool"
|
||||
],
|
||||
[
|
||||
"langchain.tools.convert_to_openai.format_tool_to_openai_function",
|
||||
"langchain_core.utils.function_calling.format_tool_to_openai_function"
|
||||
@@ -1675,49 +1891,94 @@
|
||||
"langchain.utilities.loading.try_load_from_hub",
|
||||
"langchain_core.utils.try_load_from_hub"
|
||||
],
|
||||
["langchain.utils.StrictFormatter", "langchain_core.utils.StrictFormatter"],
|
||||
[
|
||||
"langchain.utils.StrictFormatter",
|
||||
"langchain_core.utils.StrictFormatter"
|
||||
],
|
||||
[
|
||||
"langchain.utils.check_package_version",
|
||||
"langchain_core.utils.check_package_version"
|
||||
],
|
||||
["langchain.utils.comma_list", "langchain_core.utils.comma_list"],
|
||||
[
|
||||
"langchain.utils.comma_list",
|
||||
"langchain_core.utils.comma_list"
|
||||
],
|
||||
[
|
||||
"langchain.utils.convert_to_secret_str",
|
||||
"langchain_core.utils.convert_to_secret_str"
|
||||
],
|
||||
["langchain.utils.get_bolded_text", "langchain_core.utils.get_bolded_text"],
|
||||
[
|
||||
"langchain.utils.get_bolded_text",
|
||||
"langchain_core.utils.get_bolded_text"
|
||||
],
|
||||
[
|
||||
"langchain.utils.get_color_mapping",
|
||||
"langchain_core.utils.get_color_mapping"
|
||||
],
|
||||
["langchain.utils.get_colored_text", "langchain_core.utils.get_colored_text"],
|
||||
[
|
||||
"langchain.utils.get_colored_text",
|
||||
"langchain_core.utils.get_colored_text"
|
||||
],
|
||||
[
|
||||
"langchain.utils.get_from_dict_or_env",
|
||||
"langchain_core.utils.get_from_dict_or_env"
|
||||
],
|
||||
["langchain.utils.get_from_env", "langchain_core.utils.get_from_env"],
|
||||
[
|
||||
"langchain.utils.get_from_env",
|
||||
"langchain_core.utils.get_from_env"
|
||||
],
|
||||
[
|
||||
"langchain.utils.get_pydantic_field_names",
|
||||
"langchain_core.utils.get_pydantic_field_names"
|
||||
],
|
||||
["langchain.utils.guard_import", "langchain_core.utils.guard_import"],
|
||||
["langchain.utils.mock_now", "langchain_core.utils.mock_now"],
|
||||
["langchain.utils.print_text", "langchain_core.utils.print_text"],
|
||||
[
|
||||
"langchain.utils.guard_import",
|
||||
"langchain_core.utils.guard_import"
|
||||
],
|
||||
[
|
||||
"langchain.utils.mock_now",
|
||||
"langchain_core.utils.mock_now"
|
||||
],
|
||||
[
|
||||
"langchain.utils.print_text",
|
||||
"langchain_core.utils.print_text"
|
||||
],
|
||||
[
|
||||
"langchain.utils.raise_for_status_with_text",
|
||||
"langchain_core.utils.raise_for_status_with_text"
|
||||
],
|
||||
["langchain.utils.stringify_dict", "langchain_core.utils.stringify_dict"],
|
||||
["langchain.utils.stringify_value", "langchain_core.utils.stringify_value"],
|
||||
["langchain.utils.xor_args", "langchain_core.utils.xor_args"],
|
||||
["langchain.utils.aiter.py_anext", "langchain_core.utils.aiter.py_anext"],
|
||||
["langchain.utils.aiter.NoLock", "langchain_core.utils.aiter.NoLock"],
|
||||
["langchain.utils.aiter.Tee", "langchain_core.utils.aiter.Tee"],
|
||||
[
|
||||
"langchain.utils.stringify_dict",
|
||||
"langchain_core.utils.stringify_dict"
|
||||
],
|
||||
[
|
||||
"langchain.utils.stringify_value",
|
||||
"langchain_core.utils.stringify_value"
|
||||
],
|
||||
[
|
||||
"langchain.utils.xor_args",
|
||||
"langchain_core.utils.xor_args"
|
||||
],
|
||||
[
|
||||
"langchain.utils.aiter.py_anext",
|
||||
"langchain_core.utils.aiter.py_anext"
|
||||
],
|
||||
[
|
||||
"langchain.utils.aiter.NoLock",
|
||||
"langchain_core.utils.aiter.NoLock"
|
||||
],
|
||||
[
|
||||
"langchain.utils.aiter.Tee",
|
||||
"langchain_core.utils.aiter.Tee"
|
||||
],
|
||||
[
|
||||
"langchain.utils.env.get_from_dict_or_env",
|
||||
"langchain_core.utils.get_from_dict_or_env"
|
||||
],
|
||||
["langchain.utils.env.get_from_env", "langchain_core.utils.get_from_env"],
|
||||
[
|
||||
"langchain.utils.env.get_from_env",
|
||||
"langchain_core.utils.get_from_env"
|
||||
],
|
||||
[
|
||||
"langchain.utils.formatting.StrictFormatter",
|
||||
"langchain_core.utils.StrictFormatter"
|
||||
@@ -1742,10 +2003,22 @@
|
||||
"langchain.utils.input.get_bolded_text",
|
||||
"langchain_core.utils.get_bolded_text"
|
||||
],
|
||||
["langchain.utils.input.print_text", "langchain_core.utils.print_text"],
|
||||
["langchain.utils.iter.NoLock", "langchain_core.utils.iter.NoLock"],
|
||||
["langchain.utils.iter.tee_peer", "langchain_core.utils.iter.tee_peer"],
|
||||
["langchain.utils.iter.Tee", "langchain_core.utils.iter.Tee"],
|
||||
[
|
||||
"langchain.utils.input.print_text",
|
||||
"langchain_core.utils.print_text"
|
||||
],
|
||||
[
|
||||
"langchain.utils.iter.NoLock",
|
||||
"langchain_core.utils.iter.NoLock"
|
||||
],
|
||||
[
|
||||
"langchain.utils.iter.tee_peer",
|
||||
"langchain_core.utils.iter.tee_peer"
|
||||
],
|
||||
[
|
||||
"langchain.utils.iter.Tee",
|
||||
"langchain_core.utils.iter.Tee"
|
||||
],
|
||||
[
|
||||
"langchain.utils.iter.batch_iterate",
|
||||
"langchain_core.utils.iter.batch_iterate"
|
||||
@@ -1798,14 +2071,26 @@
|
||||
"langchain.utils.strings.stringify_dict",
|
||||
"langchain_core.utils.stringify_dict"
|
||||
],
|
||||
["langchain.utils.strings.comma_list", "langchain_core.utils.comma_list"],
|
||||
["langchain.utils.utils.xor_args", "langchain_core.utils.xor_args"],
|
||||
[
|
||||
"langchain.utils.strings.comma_list",
|
||||
"langchain_core.utils.comma_list"
|
||||
],
|
||||
[
|
||||
"langchain.utils.utils.xor_args",
|
||||
"langchain_core.utils.xor_args"
|
||||
],
|
||||
[
|
||||
"langchain.utils.utils.raise_for_status_with_text",
|
||||
"langchain_core.utils.raise_for_status_with_text"
|
||||
],
|
||||
["langchain.utils.utils.mock_now", "langchain_core.utils.mock_now"],
|
||||
["langchain.utils.utils.guard_import", "langchain_core.utils.guard_import"],
|
||||
[
|
||||
"langchain.utils.utils.mock_now",
|
||||
"langchain_core.utils.mock_now"
|
||||
],
|
||||
[
|
||||
"langchain.utils.utils.guard_import",
|
||||
"langchain_core.utils.guard_import"
|
||||
],
|
||||
[
|
||||
"langchain.utils.utils.check_package_version",
|
||||
"langchain_core.utils.check_package_version"
|
||||
@@ -7,8 +7,14 @@
|
||||
"langchain.text_splitter.TextSplitter",
|
||||
"langchain_text_splitters.TextSplitter"
|
||||
],
|
||||
["langchain.text_splitter.Tokenizer", "langchain_text_splitters.Tokenizer"],
|
||||
["langchain.text_splitter.Language", "langchain_text_splitters.Language"],
|
||||
[
|
||||
"langchain.text_splitter.Tokenizer",
|
||||
"langchain_text_splitters.Tokenizer"
|
||||
],
|
||||
[
|
||||
"langchain.text_splitter.Language",
|
||||
"langchain_text_splitters.Language"
|
||||
],
|
||||
[
|
||||
"langchain.text_splitter.RecursiveCharacterTextSplitter",
|
||||
"langchain_text_splitters.RecursiveCharacterTextSplitter"
|
||||
@@ -49,8 +55,14 @@
|
||||
"langchain.text_splitter.ElementType",
|
||||
"langchain_text_splitters.ElementType"
|
||||
],
|
||||
["langchain.text_splitter.HeaderType", "langchain_text_splitters.HeaderType"],
|
||||
["langchain.text_splitter.LineType", "langchain_text_splitters.LineType"],
|
||||
[
|
||||
"langchain.text_splitter.HeaderType",
|
||||
"langchain_text_splitters.HeaderType"
|
||||
],
|
||||
[
|
||||
"langchain.text_splitter.LineType",
|
||||
"langchain_text_splitters.LineType"
|
||||
],
|
||||
[
|
||||
"langchain.text_splitter.HTMLHeaderTextSplitter",
|
||||
"langchain_text_splitters.HTMLHeaderTextSplitter"
|
||||
@@ -67,4 +79,4 @@
|
||||
"langchain.text_splitter.CharacterTextSplitter",
|
||||
"langchain_text_splitters.CharacterTextSplitter"
|
||||
]
|
||||
]
|
||||
]
|
||||
@@ -0,0 +1,50 @@
|
||||
[
|
||||
[
|
||||
"langchain_community.llms.openai.OpenAI",
|
||||
"langchain_openai.OpenAI"
|
||||
],
|
||||
[
|
||||
"langchain_community.llms.openai.AzureOpenAI",
|
||||
"langchain_openai.AzureOpenAI"
|
||||
],
|
||||
[
|
||||
"langchain_community.embeddings.openai.OpenAIEmbeddings",
|
||||
"langchain_openai.OpenAIEmbeddings"
|
||||
],
|
||||
[
|
||||
"langchain_community.embeddings.azure_openai.AzureOpenAIEmbeddings",
|
||||
"langchain_openai.AzureOpenAIEmbeddings"
|
||||
],
|
||||
[
|
||||
"langchain_community.chat_models.openai.ChatOpenAI",
|
||||
"langchain_openai.ChatOpenAI"
|
||||
],
|
||||
[
|
||||
"langchain_community.chat_models.azure_openai.AzureChatOpenAI",
|
||||
"langchain_openai.AzureChatOpenAI"
|
||||
],
|
||||
[
|
||||
"langchain_community.llms.AzureOpenAI",
|
||||
"langchain_openai.AzureOpenAI"
|
||||
],
|
||||
[
|
||||
"langchain_community.llms.OpenAI",
|
||||
"langchain_openai.OpenAI"
|
||||
],
|
||||
[
|
||||
"langchain_community.embeddings.AzureOpenAIEmbeddings",
|
||||
"langchain_openai.AzureOpenAIEmbeddings"
|
||||
],
|
||||
[
|
||||
"langchain_community.embeddings.OpenAIEmbeddings",
|
||||
"langchain_openai.OpenAIEmbeddings"
|
||||
],
|
||||
[
|
||||
"langchain_community.chat_models.AzureChatOpenAI",
|
||||
"langchain_openai.AzureChatOpenAI"
|
||||
],
|
||||
[
|
||||
"langchain_community.chat_models.ChatOpenAI",
|
||||
"langchain_openai.ChatOpenAI"
|
||||
]
|
||||
]
|
||||
@@ -0,0 +1,10 @@
|
||||
[
|
||||
[
|
||||
"langchain_community.vectorstores.pinecone.Pinecone",
|
||||
"langchain_pinecone.Pinecone"
|
||||
],
|
||||
[
|
||||
"langchain_community.vectorstores.Pinecone",
|
||||
"langchain_pinecone.Pinecone"
|
||||
]
|
||||
]
|
||||
@@ -0,0 +1,204 @@
|
||||
"""
|
||||
# Adapted from bump-pydantic
|
||||
# https://github.com/pydantic/bump-pydantic
|
||||
|
||||
This codemod deals with the following cases:
|
||||
|
||||
1. `from pydantic import BaseSettings`
|
||||
2. `from pydantic.settings import BaseSettings`
|
||||
3. `from pydantic import BaseSettings as <name>`
|
||||
4. `from pydantic.settings import BaseSettings as <name>` # TODO: This is not working.
|
||||
5. `import pydantic` -> `pydantic.BaseSettings`
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable, Dict, Iterable, List, Sequence, Tuple, Type, TypeVar
|
||||
|
||||
import libcst as cst
|
||||
import libcst.matchers as m
|
||||
from libcst.codemod import VisitorBasedCodemodCommand
|
||||
from libcst.codemod.visitors import AddImportsVisitor
|
||||
|
||||
HERE = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def _load_migrations_by_file(path: str):
|
||||
migrations_path = os.path.join(HERE, "migrations", path)
|
||||
with open(migrations_path, "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
|
||||
# new migrations
|
||||
new_migrations = []
|
||||
for migration in data:
|
||||
old = migration[0].split(".")[-1]
|
||||
new = migration[1].split(".")[-1]
|
||||
|
||||
if old == new:
|
||||
new_migrations.append(migration)
|
||||
|
||||
return new_migrations
|
||||
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
def _deduplicate_in_order(
|
||||
seq: Iterable[T], key: Callable[[T], str] = lambda x: x
|
||||
) -> List[T]:
|
||||
seen = set()
|
||||
seen_add = seen.add
|
||||
return [x for x in seq if not (key(x) in seen or seen_add(key(x)))]
|
||||
|
||||
|
||||
def _load_migrations_from_fixtures(paths: List[str]) -> List[Tuple[str, str]]:
|
||||
"""Load migrations from fixtures."""
|
||||
data = []
|
||||
for path in paths:
|
||||
data.extend(_load_migrations_by_file(path))
|
||||
data = _deduplicate_in_order(data, key=lambda x: x[0])
|
||||
return data
|
||||
|
||||
|
||||
def _load_migrations(paths: List[str]):
|
||||
"""Load the migrations from the JSON file."""
|
||||
# When the same old import appears in multiple fixture files, the earlier entry wins.
|
||||
imports: Dict[str, Tuple[str, str]] = {}
|
||||
data = _load_migrations_from_fixtures(paths)
|
||||
|
||||
for old_path, new_path in data:
|
||||
# Parse the old path, which is of the format 'langchain.chat_models.ChatOpenAI',
|
||||
# into the module and class name.
|
||||
old_parts = old_path.split(".")
|
||||
old_module = ".".join(old_parts[:-1])
|
||||
old_class = old_parts[-1]
|
||||
old_path_str = f"{old_module}:{old_class}"
|
||||
|
||||
# Parse the new path, which is of the format 'langchain.chat_models.ChatOpenAI',
|
||||
# into a 2-tuple of the module and class name.
|
||||
new_parts = new_path.split(".")
|
||||
new_module = ".".join(new_parts[:-1])
|
||||
new_class = new_parts[-1]
|
||||
new_path_str = (new_module, new_class)
|
||||
|
||||
imports[old_path_str] = new_path_str
|
||||
|
||||
return imports
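# A hypothetical fixture entry such as
#   ["langchain.chat_models.ChatOpenAI", "langchain_community.chat_models.ChatOpenAI"]
# would end up in the returned mapping as
#   {"langchain.chat_models:ChatOpenAI": ("langchain_community.chat_models", "ChatOpenAI")}
# i.e. the old import keyed as "module:name" and the new import as a (module, name) tuple.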
|
||||
|
||||
|
||||
def resolve_module_parts(module_parts: list[str]) -> m.Attribute | m.Name:
|
||||
"""Converts a list of module parts to a `Name` or `Attribute` node."""
|
||||
if len(module_parts) == 1:
|
||||
return m.Name(module_parts[0])
|
||||
if len(module_parts) == 2:
|
||||
first, last = module_parts
|
||||
return m.Attribute(value=m.Name(first), attr=m.Name(last))
|
||||
last_name = module_parts.pop()
|
||||
attr = resolve_module_parts(module_parts)
|
||||
return m.Attribute(value=attr, attr=m.Name(last_name))
|
||||
|
||||
|
||||
def get_import_from_from_str(import_str: str) -> m.ImportFrom:
|
||||
"""Converts a string like `pydantic:BaseSettings` to Examples:
|
||||
>>> get_import_from_from_str("pydantic:BaseSettings")
|
||||
ImportFrom(
|
||||
module=Name("pydantic"),
|
||||
names=[ImportAlias(name=Name("BaseSettings"))],
|
||||
)
|
||||
>>> get_import_from_from_str("pydantic.settings:BaseSettings")
|
||||
ImportFrom(
|
||||
module=Attribute(value=Name("pydantic"), attr=Name("settings")),
|
||||
names=[ImportAlias(name=Name("BaseSettings"))],
|
||||
)
|
||||
>>> get_import_from_from_str("a.b.c:d")
|
||||
ImportFrom(
|
||||
module=Attribute(
|
||||
value=Attribute(value=Name("a"), attr=Name("b")), attr=Name("c")
|
||||
),
|
||||
names=[ImportAlias(name=Name("d"))],
|
||||
)
|
||||
"""
|
||||
module, name = import_str.split(":")
|
||||
module_parts = module.split(".")
|
||||
module_node = resolve_module_parts(module_parts)
|
||||
return m.ImportFrom(
|
||||
module=module_node,
|
||||
names=[m.ZeroOrMore(), m.ImportAlias(name=m.Name(value=name)), m.ZeroOrMore()],
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ImportInfo:
|
||||
import_from: m.ImportFrom
|
||||
import_str: str
|
||||
to_import_str: tuple[str, str]
|
||||
|
||||
|
||||
RULE_TO_PATHS = {
|
||||
"langchain_to_community": ["langchain_to_community.json"],
|
||||
"langchain_to_core": ["langchain_to_core.json"],
|
||||
"community_to_core": ["community_to_core.json"],
|
||||
"langchain_to_text_splitters": ["langchain_to_text_splitters.json"],
|
||||
"community_to_partner": [
|
||||
"anthropic.json",
|
||||
"fireworks.json",
|
||||
"ibm.json",
|
||||
"openai.json",
|
||||
"pinecone.json",
|
||||
"astradb.json",
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def generate_import_replacer(rules: List[str]) -> Type[VisitorBasedCodemodCommand]:
|
||||
"""Generate a codemod to replace imports."""
|
||||
paths = []
|
||||
for rule in rules:
|
||||
if rule not in RULE_TO_PATHS:
|
||||
raise ValueError(f"Unknown rule: {rule}. Use one of {RULE_TO_PATHS.keys()}")
|
||||
|
||||
paths.extend(RULE_TO_PATHS[rule])
|
||||
|
||||
imports = _load_migrations(paths)
|
||||
|
||||
import_infos = [
|
||||
ImportInfo(
|
||||
import_from=get_import_from_from_str(import_str),
|
||||
import_str=import_str,
|
||||
to_import_str=to_import_str,
|
||||
)
|
||||
for import_str, to_import_str in imports.items()
|
||||
]
|
||||
import_match = m.OneOf(*[info.import_from for info in import_infos])
|
||||
|
||||
class ReplaceImportsCodemod(VisitorBasedCodemodCommand):
|
||||
@m.leave(import_match)
|
||||
def leave_replace_import(
|
||||
self, _: cst.ImportFrom, updated_node: cst.ImportFrom
|
||||
) -> cst.ImportFrom:
|
||||
for import_info in import_infos:
|
||||
if m.matches(updated_node, import_info.import_from):
|
||||
aliases: Sequence[cst.ImportAlias] = updated_node.names # type: ignore
|
||||
# If multiple objects are imported in a single import statement,
|
||||
# we need to remove only the one we're replacing.
|
||||
AddImportsVisitor.add_needed_import(
|
||||
self.context, *import_info.to_import_str
|
||||
)
|
||||
if len(updated_node.names) > 1: # type: ignore
|
||||
names = [
|
||||
alias
|
||||
for alias in aliases
|
||||
if alias.name.value != import_info.to_import_str[-1]
|
||||
]
|
||||
names[-1] = names[-1].with_changes(
|
||||
comma=cst.MaybeSentinel.DEFAULT
|
||||
)
|
||||
updated_node = updated_node.with_changes(names=names)
|
||||
else:
|
||||
return cst.RemoveFromParent() # type: ignore[return-value]
|
||||
return updated_node
|
||||
|
||||
return ReplaceImportsCodemod
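A minimal usage sketch (mirroring the replace-imports unit tests shown further below; the direct call here is illustrative, not how the CLI invokes the codemod):

import libcst as cst
from libcst.codemod import CodemodContext

# Build a codemod for just the langchain -> langchain_community rule.
ReplaceImports = generate_import_replacer(["langchain_to_community"])

source = "from langchain.chat_models import ChatOpenAI\n"
transformer = ReplaceImports(CodemodContext())
print(transformer.transform_module(cst.parse_module(source)).code)
# Expected, per the tests below:
# from langchain_community.chat_models import ChatOpenAI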
|
||||
@@ -1,40 +0,0 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
|
||||
def split_package(package: str) -> Tuple[str, str]:
|
||||
"""Split a package name into the containing package and the final name"""
|
||||
parts = package.split(".")
|
||||
return ".".join(parts[:-1]), parts[-1]
|
||||
|
||||
|
||||
def dump_migrations_as_grit(name: str, migration_pairs: List[Tuple[str, str]]):
|
||||
"""Dump the migration pairs as a Grit file."""
|
||||
output = "language python"
|
||||
remapped = ",\n".join(
|
||||
[
|
||||
f"""
|
||||
[
|
||||
`{split_package(from_module)[0]}`,
|
||||
`{split_package(from_module)[1]}`,
|
||||
`{split_package(to_module)[0]}`,
|
||||
`{split_package(to_module)[1]}`
|
||||
]
|
||||
"""
|
||||
for from_module, to_module in migration_pairs
|
||||
]
|
||||
)
|
||||
pattern_name = f"langchain_migrate_{name}"
|
||||
output = f"""
|
||||
language python
|
||||
|
||||
// This migration is generated automatically - do not manually edit this file
|
||||
pattern {pattern_name}() {{
|
||||
find_replace_imports(list=[
|
||||
{remapped}
|
||||
])
|
||||
}}
|
||||
|
||||
// Add this for invoking directly
|
||||
{pattern_name}()
|
||||
"""
|
||||
return output
|
||||
libs/cli/langchain_cli/namespaces/migrate/glob_helpers.py (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
# Adapted from bump-pydantic
|
||||
# https://github.com/pydantic/bump-pydantic
|
||||
import fnmatch
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
MATCH_SEP = r"(?:/|\\)"
|
||||
MATCH_SEP_OR_END = r"(?:/|\\|\Z)"
|
||||
MATCH_NON_RECURSIVE = r"[^/\\]*"
|
||||
MATCH_RECURSIVE = r"(?:.*)"
|
||||
|
||||
|
||||
def glob_to_re(pattern: str) -> str:
|
||||
"""Translate a glob pattern to a regular expression for matching."""
|
||||
fragments: List[str] = []
|
||||
for segment in re.split(r"/|\\", pattern):
|
||||
if segment == "":
|
||||
continue
|
||||
if segment == "**":
|
||||
# Remove previous separator match, so the recursive match
|
||||
# can match zero or more segments.
|
||||
if fragments and fragments[-1] == MATCH_SEP:
|
||||
fragments.pop()
|
||||
fragments.append(MATCH_RECURSIVE)
|
||||
elif "**" in segment:
|
||||
raise ValueError(
|
||||
"invalid pattern: '**' can only be an entire path component"
|
||||
)
|
||||
else:
|
||||
fragment = fnmatch.translate(segment)
|
||||
fragment = fragment.replace(r"(?s:", r"(?:")
|
||||
fragment = fragment.replace(r".*", MATCH_NON_RECURSIVE)
|
||||
fragment = fragment.replace(r"\Z", r"")
|
||||
fragments.append(fragment)
|
||||
fragments.append(MATCH_SEP)
|
||||
# Remove trailing MATCH_SEP, so it can be replaced with MATCH_SEP_OR_END.
|
||||
if fragments and fragments[-1] == MATCH_SEP:
|
||||
fragments.pop()
|
||||
fragments.append(MATCH_SEP_OR_END)
|
||||
return rf"(?s:{''.join(fragments)})"
|
||||
|
||||
|
||||
def match_glob(path: Path, pattern: str) -> bool:
|
||||
"""Check if a path matches a glob pattern.
|
||||
|
||||
If the pattern ends with a directory separator, the path must be a directory.
|
||||
"""
|
||||
match = bool(re.fullmatch(glob_to_re(pattern), str(path)))
|
||||
if pattern.endswith("/") or pattern.endswith("\\"):
|
||||
return match and path.is_dir()
|
||||
return match
|
||||
@@ -1,74 +1,306 @@
|
||||
"""Migrate LangChain to the most recent version."""
|
||||
|
||||
# Adapted from bump-pydantic
|
||||
# https://github.com/pydantic/bump-pydantic
|
||||
import difflib
|
||||
import functools
|
||||
import multiprocessing
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, TypeVar, Union
|
||||
|
||||
import rich
|
||||
import libcst as cst
|
||||
import typer
|
||||
from gritql import run
|
||||
from typer import Option
|
||||
from libcst.codemod import CodemodContext, ContextAwareTransformer
|
||||
from libcst.helpers import calculate_module_and_package
|
||||
from libcst.metadata import FullRepoManager, FullyQualifiedNameProvider, ScopeProvider
|
||||
from rich.console import Console
|
||||
from rich.progress import Progress
|
||||
from typer import Argument, Exit, Option, Typer
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
from langchain_cli.namespaces.migrate.codemods import Rule, gather_codemods
|
||||
from langchain_cli.namespaces.migrate.glob_helpers import match_glob
|
||||
|
||||
app = Typer(invoke_without_command=True, add_completion=False)
|
||||
|
||||
P = ParamSpec("P")
|
||||
T = TypeVar("T")
|
||||
|
||||
DEFAULT_IGNORES = [".venv/**"]
|
||||
|
||||
|
||||
def get_gritdir_path() -> Path:
|
||||
"""Get the path to the grit directory."""
|
||||
script_dir = Path(__file__).parent
|
||||
return script_dir / ".grit"
|
||||
|
||||
|
||||
def migrate(
|
||||
ctx: typer.Context,
|
||||
# Using diff instead of dry-run for backwards compatibility with the old CLI
|
||||
diff: bool = Option(
|
||||
False,
|
||||
"--diff",
|
||||
help="Show the changes that would be made without applying them.",
|
||||
@app.callback()
|
||||
def main(
|
||||
path: Path = Argument(..., exists=True, dir_okay=True, allow_dash=False),
|
||||
disable: List[Rule] = Option(default=[], help="Disable a rule."),
|
||||
diff: bool = Option(False, help="Show diff instead of applying changes."),
|
||||
ignore: List[str] = Option(
|
||||
default=DEFAULT_IGNORES, help="Ignore a path glob pattern."
|
||||
),
|
||||
interactive: bool = Option(
|
||||
False,
|
||||
"--interactive",
|
||||
help="Prompt for confirmation before making each change",
|
||||
log_file: Path = Option("log.txt", help="Log errors to this file."),
|
||||
include_ipynb: bool = Option(
|
||||
False, help="Include Jupyter Notebook files in the migration."
|
||||
),
|
||||
) -> None:
|
||||
"""Migrate langchain to the most recent version.
|
||||
):
|
||||
"""Migrate langchain to the most recent version."""
|
||||
if not diff:
|
||||
if not typer.confirm(
|
||||
"✈️ This script will help you migrate to a recent version LangChain. "
|
||||
"This migration script will attempt to replace old imports in the code "
|
||||
"with new ones.\n\n"
|
||||
"🔄 You will need to run the migration script TWICE to migrate (e.g., "
|
||||
"to update llms import from langchain, the script will first move them to "
|
||||
"corresponding imports from the community package, and on the second "
|
||||
"run will migrate from the community package to the partner package "
|
||||
"when possible). \n\n"
|
||||
"🔍 You can pre-view the changes by running with the --diff flag. \n\n"
|
||||
"🚫 You can disable specific import changes by using the --disable "
|
||||
"flag. \n\n"
|
||||
"📄 Update your pyproject.toml or requirements.txt file to "
|
||||
"reflect any imports from new packages. For example, if you see new "
|
||||
"imports from langchain_openai, langchain_anthropic or "
|
||||
"langchain_text_splitters you "
|
||||
"should them to your dependencies! \n\n"
|
||||
'⚠️ This script is a "best-effort", and is likely to make some '
|
||||
"mistakes.\n\n"
|
||||
"🛡️ Backup your code prior to running the migration script -- it will "
|
||||
"modify your files!\n\n"
|
||||
"❓ Do you want to continue?"
|
||||
):
|
||||
raise Exit()
|
||||
console = Console(log_time=True)
|
||||
console.log("Start langchain-cli migrate")
|
||||
# NOTE: LIBCST_PARSER_TYPE=native is required according to https://github.com/Instagram/LibCST/issues/487.
|
||||
os.environ["LIBCST_PARSER_TYPE"] = "native"
|
||||
|
||||
Any undocumented arguments will be passed to the Grit CLI.
|
||||
if os.path.isfile(path):
|
||||
package = path.parent
|
||||
all_files = [path]
|
||||
else:
|
||||
package = path
|
||||
all_files = sorted(package.glob("**/*.py"))
|
||||
if include_ipynb:
|
||||
all_files.extend(sorted(package.glob("**/*.ipynb")))
|
||||
|
||||
filtered_files = [
|
||||
file
|
||||
for file in all_files
|
||||
if not any(match_glob(file, pattern) for pattern in ignore)
|
||||
]
|
||||
files = [str(file.relative_to(".")) for file in filtered_files]
|
||||
|
||||
if len(files) == 1:
|
||||
console.log("Found 1 file to process.")
|
||||
elif len(files) > 1:
|
||||
console.log(f"Found {len(files)} files to process.")
|
||||
else:
|
||||
console.log("No files to process.")
|
||||
raise Exit()
|
||||
|
||||
providers = {FullyQualifiedNameProvider, ScopeProvider}
|
||||
metadata_manager = FullRepoManager(".", files, providers=providers) # type: ignore[arg-type]
|
||||
metadata_manager.resolve_cache()
|
||||
|
||||
scratch: dict[str, Any] = {}
|
||||
start_time = time.time()
|
||||
|
||||
log_fp = log_file.open("a+", encoding="utf8")
|
||||
partial_run_codemods = functools.partial(
|
||||
get_and_run_codemods, disable, metadata_manager, scratch, package, diff
|
||||
)
|
||||
with Progress(*Progress.get_default_columns(), transient=True) as progress:
|
||||
task = progress.add_task(description="Executing codemods...", total=len(files))
|
||||
count_errors = 0
|
||||
difflines: List[List[str]] = []
|
||||
with multiprocessing.Pool() as pool:
|
||||
for error, _difflines in pool.imap_unordered(partial_run_codemods, files):
|
||||
progress.advance(task)
|
||||
|
||||
if _difflines is not None:
|
||||
difflines.append(_difflines)
|
||||
|
||||
if error is not None:
|
||||
count_errors += 1
|
||||
log_fp.writelines(error)
|
||||
|
||||
modified = [Path(f) for f in files if os.stat(f).st_mtime > start_time]
|
||||
|
||||
if not diff:
|
||||
if modified:
|
||||
console.log(f"Refactored {len(modified)} files.")
|
||||
else:
|
||||
console.log("No files were modified.")
|
||||
|
||||
for _difflines in difflines:
|
||||
color_diff(console, _difflines)
|
||||
|
||||
if count_errors > 0:
|
||||
console.log(f"Found {count_errors} errors. Please check the {log_file} file.")
|
||||
else:
|
||||
console.log("Run successfully!")
|
||||
|
||||
if difflines:
|
||||
raise Exit(1)
|
||||
|
||||
|
||||
def get_and_run_codemods(
|
||||
disabled_rules: List[Rule],
|
||||
metadata_manager: FullRepoManager,
|
||||
scratch: Dict[str, Any],
|
||||
package: Path,
|
||||
diff: bool,
|
||||
filename: str,
|
||||
) -> Tuple[Union[str, None], Union[List[str], None]]:
|
||||
"""Run codemods from rules.
|
||||
|
||||
Wrapper around run_codemods to be used with multiprocessing.Pool.
|
||||
"""
|
||||
rich.print(
|
||||
"✈️ This script will help you migrate to a LangChain 0.3. "
|
||||
"This migration script will attempt to replace old imports in the code "
|
||||
"with new ones. "
|
||||
"If you need to migrate to LangChain 0.2, please downgrade to version 0.0.29 "
|
||||
"of the langchain-cli.\n\n"
|
||||
"🔄 You will need to run the migration script TWICE to migrate (e.g., "
|
||||
"to update llms import from langchain, the script will first move them to "
|
||||
"corresponding imports from the community package, and on the second "
|
||||
"run will migrate from the community package to the partner package "
|
||||
"when possible). \n\n"
|
||||
"🔍 You can pre-view the changes by running with the --diff flag. \n\n"
|
||||
"🚫 You can disable specific import changes by using the --disable "
|
||||
"flag. \n\n"
|
||||
"📄 Update your pyproject.toml or requirements.txt file to "
|
||||
"reflect any imports from new packages. For example, if you see new "
|
||||
"imports from langchain_openai, langchain_anthropic or "
|
||||
"langchain_text_splitters you "
|
||||
"should them to your dependencies! \n\n"
|
||||
'⚠️ This script is a "best-effort", and is likely to make some '
|
||||
"mistakes.\n\n"
|
||||
"🛡️ Backup your code prior to running the migration script -- it will "
|
||||
"modify your files!\n\n"
|
||||
)
|
||||
rich.print("-" * 10)
|
||||
rich.print()
|
||||
codemods = gather_codemods(disabled=disabled_rules)
|
||||
return run_codemods(codemods, metadata_manager, scratch, package, diff, filename)
|
||||
|
||||
|
||||
def _rewrite_file(
|
||||
filename: str,
|
||||
codemods: List[Type[ContextAwareTransformer]],
|
||||
diff: bool,
|
||||
context: CodemodContext,
|
||||
) -> Tuple[Union[str, None], Union[List[str], None]]:
|
||||
file_path = Path(filename)
|
||||
with file_path.open("r+", encoding="utf-8") as fp:
|
||||
code = fp.read()
|
||||
fp.seek(0)
|
||||
|
||||
input_tree = cst.parse_module(code)
|
||||
|
||||
for codemod in codemods:
|
||||
transformer = codemod(context=context)
|
||||
output_tree = transformer.transform_module(input_tree)
|
||||
input_tree = output_tree
|
||||
|
||||
output_code = input_tree.code
|
||||
if code != output_code:
|
||||
if diff:
|
||||
lines = difflib.unified_diff(
|
||||
code.splitlines(keepends=True),
|
||||
output_code.splitlines(keepends=True),
|
||||
fromfile=filename,
|
||||
tofile=filename,
|
||||
)
|
||||
return None, list(lines)
|
||||
else:
|
||||
fp.write(output_code)
|
||||
fp.truncate()
|
||||
return None, None
|
||||
|
||||
|
||||
def _rewrite_notebook(
|
||||
filename: str,
|
||||
codemods: List[Type[ContextAwareTransformer]],
|
||||
diff: bool,
|
||||
context: CodemodContext,
|
||||
) -> Tuple[Optional[str], Optional[List[str]]]:
|
||||
"""Try to rewrite a Jupyter Notebook file."""
|
||||
import nbformat
|
||||
|
||||
file_path = Path(filename)
|
||||
if file_path.suffix != ".ipynb":
|
||||
raise ValueError("Only Jupyter Notebook files (.ipynb) are supported.")
|
||||
|
||||
with file_path.open("r", encoding="utf-8") as fp:
|
||||
notebook = nbformat.read(fp, as_version=4)
|
||||
|
||||
diffs = []
|
||||
|
||||
for cell in notebook.cells:
|
||||
if cell.cell_type == "code":
|
||||
code = "".join(cell.source)
|
||||
|
||||
# Skip code if any of the lines begin with a magic command or
|
||||
# a ! command.
|
||||
# We can try to handle later.
|
||||
if any(
|
||||
line.startswith("!") or line.startswith("%")
|
||||
for line in code.splitlines()
|
||||
):
|
||||
continue
|
||||
|
||||
input_tree = cst.parse_module(code)
|
||||
|
||||
# TODO(Team): Quick hack, need to figure out
|
||||
# how to handle this correctly.
|
||||
# This prevents the code from trying to re-insert the imports
|
||||
# for every cell in the notebook.
|
||||
local_context = CodemodContext()
|
||||
|
||||
for codemod in codemods:
|
||||
transformer = codemod(context=local_context)
|
||||
output_tree = transformer.transform_module(input_tree)
|
||||
input_tree = output_tree
|
||||
|
||||
output_code = input_tree.code
|
||||
if code != output_code:
|
||||
cell.source = output_code.splitlines(keepends=True)
|
||||
if diff:
|
||||
cell_diff = difflib.unified_diff(
|
||||
code.splitlines(keepends=True),
|
||||
output_code.splitlines(keepends=True),
|
||||
fromfile=filename,
|
||||
tofile=filename,
|
||||
)
|
||||
diffs.extend(list(cell_diff))
|
||||
|
||||
args = list(ctx.args)
|
||||
if interactive:
|
||||
args.append("--interactive")
|
||||
if diff:
|
||||
args.append("--dry-run")
|
||||
return None, diffs
|
||||
|
||||
final_code = run.apply_pattern(
|
||||
"langchain_all_migrations()",
|
||||
args,
|
||||
grit_dir=get_gritdir_path(),
|
||||
)
|
||||
with file_path.open("w", encoding="utf-8") as fp:
|
||||
nbformat.write(notebook, fp)
|
||||
|
||||
raise typer.Exit(code=final_code)
|
||||
return None, None
|
||||
|
||||
|
||||
def run_codemods(
|
||||
codemods: List[Type[ContextAwareTransformer]],
|
||||
metadata_manager: FullRepoManager,
|
||||
scratch: Dict[str, Any],
|
||||
package: Path,
|
||||
diff: bool,
|
||||
filename: str,
|
||||
) -> Tuple[Union[str, None], Union[List[str], None]]:
|
||||
try:
|
||||
module_and_package = calculate_module_and_package(str(package), filename)
|
||||
context = CodemodContext(
|
||||
metadata_manager=metadata_manager,
|
||||
filename=filename,
|
||||
full_module_name=module_and_package.name,
|
||||
full_package_name=module_and_package.package,
|
||||
)
|
||||
context.scratch.update(scratch)
|
||||
|
||||
if filename.endswith(".ipynb"):
|
||||
return _rewrite_notebook(filename, codemods, diff, context)
|
||||
else:
|
||||
return _rewrite_file(filename, codemods, diff, context)
|
||||
except cst.ParserSyntaxError as exc:
|
||||
return (
|
||||
f"A syntax error happened on {filename}. This file cannot be "
|
||||
f"formatted.\n"
|
||||
f"{exc}"
|
||||
), None
|
||||
except Exception:
|
||||
return f"An error happened on {filename}.\n{traceback.format_exc()}", None
|
||||
|
||||
|
||||
def color_diff(console: Console, lines: Iterable[str]) -> None:
|
||||
for line in lines:
|
||||
line = line.rstrip("\n")
|
||||
if line.startswith("+"):
|
||||
console.print(line, style="green")
|
||||
elif line.startswith("-"):
|
||||
console.print(line, style="red")
|
||||
elif line.startswith("^"):
|
||||
console.print(line, style="blue")
|
||||
else:
|
||||
console.print(line, style="white")
|
||||
|
||||
@@ -7,7 +7,7 @@ readme = "README.md"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.9,<4.0"
|
||||
langchain-core = "^0.3.0"
|
||||
langchain-core = "^0.3.0.dev"
|
||||
langchain-openai = ">=0.0.1"
|
||||
|
||||
|
||||
|
||||
libs/cli/poetry.lock (generated, 61 lines changed)
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "annotated-types"
|
||||
@@ -263,17 +263,6 @@ gitdb = ">=4.0.1,<5"
|
||||
doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"]
|
||||
test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"]
|
||||
|
||||
[[package]]
|
||||
name = "gritql"
|
||||
version = "0.1.4"
|
||||
description = "Python bindings for GritQL"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8"
|
||||
files = [
|
||||
{file = "gritql-0.1.4-py3-none-any.whl", hash = "sha256:6e9f8c638bbf3dda58222832c976c716a1ca02a920e7549df58bf1a0bb9ebeef"},
|
||||
{file = "gritql-0.1.4.tar.gz", hash = "sha256:487d0c1a98cb17cc9681121e53ac15f39e1cb87c4317a2ca1872e33d3d3a0a47"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.14.0"
|
||||
@@ -429,8 +418,8 @@ jsonpatch = ">=1.33,<2.0"
|
||||
langsmith = ">=0.1.75,<0.2.0"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = [
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
]
|
||||
PyYAML = ">=5.3"
|
||||
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
|
||||
@@ -476,11 +465,51 @@ files = [
|
||||
httpx = ">=0.23.0,<1"
|
||||
orjson = ">=3.9.14,<4.0.0"
|
||||
pydantic = [
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
]
|
||||
requests = ">=2,<3"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "1.4.0"
|
||||
description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.12 programs."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "libcst-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:279b54568ea1f25add50ea4ba3d76d4f5835500c82f24d54daae4c5095b986aa"},
|
||||
{file = "libcst-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3401dae41fe24565387a65baee3887e31a44e3e58066b0250bc3f3ccf85b1b5a"},
|
||||
{file = "libcst-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1989fa12d3cd79118ebd29ebe2a6976d23d509b1a4226bc3d66fcb7cb50bd5d"},
|
||||
{file = "libcst-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:addc6d585141a7677591868886f6bda0577529401a59d210aa8112114340e129"},
|
||||
{file = "libcst-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17d71001cb25e94cfe8c3d997095741a8c4aa7a6d234c0f972bc42818c88dfaf"},
|
||||
{file = "libcst-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:2d47de16d105e7dd5f4e01a428d9f4dc1e71efd74f79766daf54528ce37f23c3"},
|
||||
{file = "libcst-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6227562fc5c9c1efd15dfe90b0971ae254461b8b6b23c1b617139b6003de1c1"},
|
||||
{file = "libcst-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3399e6c95df89921511b44d8c5bf6a75bcbc2d51f1f6429763609ba005c10f6b"},
|
||||
{file = "libcst-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48601e3e590e2d6a7ab8c019cf3937c70511a78d778ab3333764531253acdb33"},
|
||||
{file = "libcst-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f42797309bb725f0f000510d5463175ccd7155395f09b5e7723971b0007a976d"},
|
||||
{file = "libcst-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb4e42ea107a37bff7f9fdbee9532d39f9ea77b89caa5c5112b37057b12e0838"},
|
||||
{file = "libcst-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:9d0cc3c5a2a51fa7e1d579a828c0a2e46b2170024fd8b1a0691c8a52f3abb2d9"},
|
||||
{file = "libcst-1.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7ece51d935bc9bf60b528473d2e5cc67cbb88e2f8146297e40ee2c7d80be6f13"},
|
||||
{file = "libcst-1.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:81653dea1cdfa4c6520a7c5ffb95fa4d220cbd242e446c7a06d42d8636bfcbba"},
|
||||
{file = "libcst-1.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6abce0e66bba2babfadc20530fd3688f672d565674336595b4623cd800b91ef"},
|
||||
{file = "libcst-1.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da9d7dc83801aba3b8d911f82dc1a375db0d508318bad79d9fb245374afe068"},
|
||||
{file = "libcst-1.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c54aa66c86d8ece9c93156a2cf5ca512b0dce40142fe9e072c86af2bf892411"},
|
||||
{file = "libcst-1.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:62e2682ee1567b6a89c91853865372bf34f178bfd237853d84df2b87b446e654"},
|
||||
{file = "libcst-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8ecdba8934632b4dadacb666cd3816627a6ead831b806336972ccc4ba7ca0e9"},
|
||||
{file = "libcst-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8e54c777b8d27339b70f304d16fc8bc8674ef1bd34ed05ea874bf4921eb5a313"},
|
||||
{file = "libcst-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:061d6855ef30efe38b8a292b7e5d57c8e820e71fc9ec9846678b60a934b53bbb"},
|
||||
{file = "libcst-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb0abf627ee14903d05d0ad9b2c6865f1b21eb4081e2c7bea1033f85db2b8bae"},
|
||||
{file = "libcst-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d024f44059a853b4b852cfc04fec33e346659d851371e46fc8e7c19de24d3da9"},
|
||||
{file = "libcst-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:3c6a8faab9da48c5b371557d0999b4ca51f4f2cbd37ee8c2c4df0ac01c781465"},
|
||||
{file = "libcst-1.4.0.tar.gz", hash = "sha256:449e0b16604f054fa7f27c3ffe86ea7ef6c409836fe68fe4e752a1894175db00"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pyyaml = ">=5.2"
|
||||
|
||||
[package.extras]
|
||||
dev = ["Sphinx (>=5.1.1)", "black (==23.12.1)", "build (>=0.10.0)", "coverage (>=4.5.4)", "fixit (==2.1.0)", "flake8 (==7.0.0)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.4)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<1.6)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.6.0)", "usort (==1.0.8.post1)"]
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
version = "3.0.0"
|
||||
@@ -652,8 +681,8 @@ files = [
|
||||
annotated-types = ">=0.4.0"
|
||||
pydantic-core = "2.23.2"
|
||||
typing-extensions = [
|
||||
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
|
||||
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
|
||||
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
|
||||
]
|
||||
tzdata = {version = "*", markers = "python_version >= \"3.9\""}
|
||||
|
||||
@@ -1365,4 +1394,4 @@ serve = []
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<4.0"
|
||||
content-hash = "733676371ee89686b906db3bc190d39128fb664bf34d9b3ccd66aac75c8cc2aa"
|
||||
content-hash = "d7ef8a78c84458975d2ff479af00f4bde06e77f25f8306c64aef5bdb34f34798"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "langchain-cli"
|
||||
version = "0.0.31"
|
||||
version = "0.0.30"
|
||||
description = "CLI for interacting with LangChain"
|
||||
authors = ["Erick Friis <erick@langchain.dev>"]
|
||||
readme = "README.md"
|
||||
@@ -18,7 +18,7 @@ gitpython = "^3.1.40"
|
||||
langserve = { extras = ["all"], version = ">=0.0.51" }
|
||||
uvicorn = "^0.23.2"
|
||||
tomlkit = "^0.12.2"
|
||||
gritql = "^0.1.1"
|
||||
libcst = { version = "^1.3.1", python = "^3.9" }
|
||||
|
||||
[tool.poetry.scripts]
|
||||
langchain = "langchain_cli.cli:app"
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
"""Script to generate migrations for the migration script."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import pkgutil
|
||||
|
||||
import click
|
||||
@@ -9,9 +8,6 @@ import click
|
||||
from langchain_cli.namespaces.migrate.generate.generic import (
|
||||
generate_simplified_migrations,
|
||||
)
|
||||
from langchain_cli.namespaces.migrate.generate.grit import (
|
||||
dump_migrations_as_grit,
|
||||
)
|
||||
from langchain_cli.namespaces.migrate.generate.partner import (
|
||||
get_migrations_for_partner_package,
|
||||
)
|
||||
@@ -42,48 +38,16 @@ def cli():
|
||||
default=True,
|
||||
help="Output file for the migration script.",
|
||||
)
|
||||
@click.option(
|
||||
"--format",
|
||||
type=click.Choice(["json", "grit"], case_sensitive=False),
|
||||
default="json",
|
||||
help="The output format for the migration script (json or grit).",
|
||||
)
|
||||
def generic(
|
||||
pkg1: str, pkg2: str, output: str, filter_by_all: bool, format: str
|
||||
) -> None:
|
||||
def generic(pkg1: str, pkg2: str, output: str, filter_by_all: bool) -> None:
|
||||
"""Generate a migration script."""
|
||||
click.echo("Migration script generated.")
|
||||
migrations = generate_simplified_migrations(pkg1, pkg2, filter_by_all=filter_by_all)
|
||||
|
||||
if output is not None:
|
||||
name = output.removesuffix(".json").removesuffix(".grit")
|
||||
else:
|
||||
name = f"{pkg1}_to_{pkg2}"
|
||||
|
||||
if output is None:
|
||||
output = f"{name}.json" if format == "json" else f"{name}.grit"
|
||||
|
||||
if format == "json":
|
||||
dumped = json.dumps(migrations, indent=2, sort_keys=True)
|
||||
else:
|
||||
dumped = dump_migrations_as_grit(name, migrations)
|
||||
output = f"{pkg1}_to_{pkg2}.json"
|
||||
|
||||
with open(output, "w") as f:
|
||||
f.write(dumped)
|
||||
|
||||
|
||||
def handle_partner(pkg: str, output: str = None):
|
||||
migrations = get_migrations_for_partner_package(pkg)
|
||||
# Run with python 3.9+
|
||||
name = pkg.removeprefix("langchain_")
|
||||
data = dump_migrations_as_grit(name, migrations)
|
||||
output_name = f"{name}.grit" if output is None else output
|
||||
if migrations:
|
||||
with open(output_name, "w") as f:
|
||||
f.write(data)
|
||||
click.secho(f"LangChain migration script saved to {output_name}")
|
||||
else:
|
||||
click.secho(f"No migrations found for {pkg}", fg="yellow")
|
||||
f.write(json.dumps(migrations, indent=2, sort_keys=True))
|
||||
|
||||
|
||||
@cli.command()
|
||||
@@ -92,27 +56,21 @@ def handle_partner(pkg: str, output: str = None):
|
||||
def partner(pkg: str, output: str) -> None:
|
||||
"""Generate migration scripts specifically for LangChain modules."""
|
||||
click.echo("Migration script for LangChain generated.")
|
||||
handle_partner(pkg, output)
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.argument("json_file")
|
||||
def json_to_grit(json_file: str) -> None:
|
||||
"""Generate a Grit migration from an old JSON migration file."""
|
||||
with open(json_file, "r") as f:
|
||||
migrations = json.load(f)
|
||||
name = os.path.basename(json_file).removesuffix(".json").removesuffix(".grit")
|
||||
data = dump_migrations_as_grit(name, migrations)
|
||||
output_name = f"{name}.grit"
|
||||
with open(output_name, "w") as f:
|
||||
f.write(data)
|
||||
click.secho(f"GritQL migration script saved to {output_name}")
|
||||
migrations = get_migrations_for_partner_package(pkg)
|
||||
# Run with python 3.9+
|
||||
output_name = f"{pkg.removeprefix('langchain_')}.json" if output is None else output
|
||||
if migrations:
|
||||
with open(output_name, "w") as f:
|
||||
f.write(json.dumps(migrations, indent=2, sort_keys=True))
|
||||
click.secho(f"LangChain migration script saved to {output_name}")
|
||||
else:
|
||||
click.secho(f"No migrations found for {pkg}", fg="yellow")
|
||||
|
||||
|
||||
@cli.command()
|
||||
def all_installed_partner_pkgs() -> None:
|
||||
"""Generate migration scripts for all LangChain modules."""
|
||||
# Will generate migrations for all partner packages.
|
||||
# Will generate migrations for all pather packages.
|
||||
# Define as "langchain_<partner_name>".
|
||||
# First let's determine which packages are installed in the environment
|
||||
# and then generate migrations for them.
|
||||
@@ -123,7 +81,15 @@ def all_installed_partner_pkgs() -> None:
|
||||
and name not in {"langchain_core", "langchain_cli", "langchain_community"}
|
||||
]
|
||||
for pkg in langchain_pkgs:
|
||||
handle_partner(pkg)
|
||||
migrations = get_migrations_for_partner_package(pkg)
|
||||
# Run with python 3.9+
|
||||
output_name = f"{pkg.removeprefix('langchain_')}.json"
|
||||
if migrations:
|
||||
with open(output_name, "w") as f:
|
||||
f.write(json.dumps(migrations, indent=2, sort_keys=True))
|
||||
click.secho(f"LangChain migration script saved to {output_name}")
|
||||
else:
|
||||
click.secho(f"No migrations found for {pkg}", fg="yellow")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -3,14 +3,14 @@ from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
pytest.importorskip("gritql")
|
||||
pytest.importorskip("libcst")
|
||||
|
||||
import difflib
|
||||
from pathlib import Path
|
||||
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from langchain_cli.cli import app
|
||||
from langchain_cli.namespaces.migrate.main import app
|
||||
from tests.unit_tests.migrate.cli_runner.cases import before, expected
|
||||
from tests.unit_tests.migrate.cli_runner.folder import Folder
|
||||
|
||||
@@ -47,7 +47,7 @@ def test_command_line(tmp_path: Path) -> None:
|
||||
with runner.isolated_filesystem(temp_dir=tmp_path) as td:
|
||||
before.create_structure(root=Path(td))
|
||||
# The input is used to force through the confirmation.
|
||||
result = runner.invoke(app, ["migrate", before.name, "--force"])
|
||||
result = runner.invoke(app, [before.name], input="y\n")
|
||||
assert result.exit_code == 0, result.output
|
||||
|
||||
after = Folder.from_structure(Path(td) / before.name)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import pytest
|
||||
from langchain._api import suppress_langchain_deprecation_warning as sup2
|
||||
from langchain_core._api import suppress_langchain_deprecation_warning as sup1
|
||||
|
||||
@@ -7,7 +6,6 @@ from langchain_cli.namespaces.migrate.generate.generic import (
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="Unknown reason")
|
||||
def test_create_json_agent_migration() -> None:
|
||||
"""Test the migration of create_json_agent from langchain to langchain_community."""
|
||||
with sup1():
|
||||
@@ -36,7 +34,6 @@ def test_create_json_agent_migration() -> None:
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="Unknown reason")
|
||||
def test_create_single_store_retriever_db() -> None:
|
||||
"""Test migration from langchain to langchain_core"""
|
||||
with sup1():
|
||||
|
||||
libs/cli/tests/unit_tests/migrate/test_code_migrations.py (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
"""Verify that the code migrations do not involve alias changes.
|
||||
|
||||
Migration script only updates imports, not the rest of the code that uses the
|
||||
import.
|
||||
"""
|
||||
|
||||
from langchain_cli.namespaces.migrate.codemods.replace_imports import (
|
||||
RULE_TO_PATHS,
|
||||
_load_migrations_from_fixtures,
|
||||
)
|
||||
|
||||
|
||||
def test_migration_files() -> None:
|
||||
"""Generate a codemod to replace imports."""
|
||||
errors = []
|
||||
|
||||
for paths in list(RULE_TO_PATHS.values()):
|
||||
for path in paths:
|
||||
migrations = _load_migrations_from_fixtures([path])
|
||||
|
||||
for migration in migrations:
|
||||
old = migration[0].split(".")[-1]
|
||||
new = migration[1].split(".")[-1]
|
||||
if old != new:
|
||||
errors.append((path, migration))
|
||||
|
||||
if errors:
|
||||
raise ValueError(
|
||||
f"Migration involves an alias change: {errors}. The "
|
||||
f"migration script does not currently support "
|
||||
f"corresponding code changes."
|
||||
)
|
||||
libs/cli/tests/unit_tests/migrate/test_glob_helpers.py (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from langchain_cli.namespaces.migrate.glob_helpers import glob_to_re, match_glob
|
||||
|
||||
|
||||
class TestGlobHelpers:
|
||||
match_glob_values: list[tuple[str, Path, bool]] = [
|
||||
("foo", Path("foo"), True),
|
||||
("foo", Path("bar"), False),
|
||||
("foo", Path("foo/bar"), False),
|
||||
("*", Path("foo"), True),
|
||||
("*", Path("bar"), True),
|
||||
("*", Path("foo/bar"), False),
|
||||
("**", Path("foo"), True),
|
||||
("**", Path("foo/bar"), True),
|
||||
("**", Path("foo/bar/baz/qux"), True),
|
||||
("foo/bar", Path("foo/bar"), True),
|
||||
("foo/bar", Path("foo"), False),
|
||||
("foo/bar", Path("far"), False),
|
||||
("foo/bar", Path("foo/foo"), False),
|
||||
("foo/*", Path("foo/bar"), True),
|
||||
("foo/*", Path("foo/bar/baz"), False),
|
||||
("foo/*", Path("foo"), False),
|
||||
("foo/*", Path("bar"), False),
|
||||
("foo/**", Path("foo/bar"), True),
|
||||
("foo/**", Path("foo/bar/baz"), True),
|
||||
("foo/**", Path("foo/bar/baz/qux"), True),
|
||||
("foo/**", Path("foo"), True),
|
||||
("foo/**", Path("bar"), False),
|
||||
("foo/**/bar", Path("foo/bar"), True),
|
||||
("foo/**/bar", Path("foo/baz/bar"), True),
|
||||
("foo/**/bar", Path("foo/baz/qux/bar"), True),
|
||||
("foo/**/bar", Path("foo/baz/qux"), False),
|
||||
("foo/**/bar", Path("foo/bar/baz"), False),
|
||||
("foo/**/bar", Path("foo/bar/bar"), True),
|
||||
("foo/**/bar", Path("foo"), False),
|
||||
("foo/**/bar", Path("bar"), False),
|
||||
("foo/**/*/bar", Path("foo/bar"), False),
|
||||
("foo/**/*/bar", Path("foo/baz/bar"), True),
|
||||
("foo/**/*/bar", Path("foo/baz/qux/bar"), True),
|
||||
("foo/**/*/bar", Path("foo/baz/qux"), False),
|
||||
("foo/**/*/bar", Path("foo/bar/baz"), False),
|
||||
("foo/**/*/bar", Path("foo/bar/bar"), True),
|
||||
("foo/**/*/bar", Path("foo"), False),
|
||||
("foo/**/*/bar", Path("bar"), False),
|
||||
("foo/ba*", Path("foo/bar"), True),
|
||||
("foo/ba*", Path("foo/baz"), True),
|
||||
("foo/ba*", Path("foo/qux"), False),
|
||||
("foo/ba*", Path("foo/baz/qux"), False),
|
||||
("foo/ba*", Path("foo/bar/baz"), False),
|
||||
("foo/ba*", Path("foo"), False),
|
||||
("foo/ba*", Path("bar"), False),
|
||||
("foo/**/ba*/*/qux", Path("foo/a/b/c/bar/a/qux"), True),
|
||||
("foo/**/ba*/*/qux", Path("foo/a/b/c/baz/a/qux"), True),
|
||||
("foo/**/ba*/*/qux", Path("foo/a/bar/a/qux"), True),
|
||||
("foo/**/ba*/*/qux", Path("foo/baz/a/qux"), True),
|
||||
("foo/**/ba*/*/qux", Path("foo/baz/qux"), False),
|
||||
("foo/**/ba*/*/qux", Path("foo/a/b/c/qux/a/qux"), False),
|
||||
("foo/**/ba*/*/qux", Path("foo"), False),
|
||||
("foo/**/ba*/*/qux", Path("bar"), False),
|
||||
]
|
||||
|
||||
@pytest.mark.parametrize(("pattern", "path", "expected"), match_glob_values)
|
||||
def test_match_glob(self, pattern: str, path: Path, expected: bool):
|
||||
expr = glob_to_re(pattern)
|
||||
assert (
|
||||
match_glob(path, pattern) == expected
|
||||
), f"path: {path}, pattern: {pattern}, expr: {expr}"
|
||||
libs/cli/tests/unit_tests/migrate/test_replace_imports.py (new file, 60 lines)
@@ -0,0 +1,60 @@
# ruff: noqa: E402
import pytest

pytest.importorskip("libcst")


from libcst.codemod import CodemodTest

from langchain_cli.namespaces.migrate.codemods.replace_imports import (
    generate_import_replacer,
)

ReplaceImportsCodemod = generate_import_replacer(
    [
        "langchain_to_community",
        "community_to_partner",
        "langchain_to_core",
        "community_to_core",
    ]
)  # type: ignore[attr-defined]


class TestReplaceImportsCommand(CodemodTest):
    TRANSFORM = ReplaceImportsCodemod

    def test_single_import(self) -> None:
        before = """
        from langchain.chat_models import ChatOpenAI
        """
        after = """
        from langchain_community.chat_models import ChatOpenAI
        """
        self.assertCodemod(before, after)

    def test_from_community_to_partner(self) -> None:
        """Test that we can replace imports from community to partner."""
        before = """
        from langchain_community.chat_models import ChatOpenAI
        """
        after = """
        from langchain_openai import ChatOpenAI
        """
        self.assertCodemod(before, after)

    def test_noop_import(self) -> None:
        code = """
        from foo import ChatOpenAI
        """
        self.assertCodemod(code, code)

    def test_mixed_imports(self) -> None:
        before = """
        from langchain_community.chat_models import ChatOpenAI, ChatAnthropic, foo
        """
        after = """
        from langchain_community.chat_models import foo
        from langchain_anthropic import ChatAnthropic
        from langchain_openai import ChatOpenAI
        """
        self.assertCodemod(before, after)
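Outside the `CodemodTest` harness, the generated codemod can also be applied with libcst's standard codemod API. A hedged sketch (assuming the generated class follows the usual `Codemod(context)` construction):

import libcst as cst
from libcst.codemod import CodemodContext

from langchain_cli.namespaces.migrate.codemods.replace_imports import (
    generate_import_replacer,
)

# Same rule set as the first test case above (hypothetical standalone usage).
ReplaceImports = generate_import_replacer(["langchain_to_community"])

source = "from langchain.chat_models import ChatOpenAI\n"
module = cst.parse_module(source)
# Codemod.transform_module returns a new, rewritten Module.
rewritten = ReplaceImports(CodemodContext()).transform_module(module)
print(rewritten.code)  # -> from langchain_community.chat_models import ChatOpenAI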
@@ -1,9 +1,6 @@
import pytest

from langchain_cli.utils.events import EventDict, create_events


@pytest.mark.xfail(reason="Unknown reason")
def test_create_events() -> None:
    assert create_events(
        [EventDict(event="Test Event", properties={"test": "test"})]
@@ -19,7 +19,7 @@ test tests:
	poetry run pytest --disable-socket --allow-unix-socket $(TEST_FILE)

integration_tests:
	poetry run pytest -m runs $(TEST_FILE)
	poetry run pytest $(TEST_FILE)

test_watch:
	poetry run ptw --disable-socket --allow-unix-socket --snapshot-update --now . -- -vv tests/unit_tests
@@ -1,2 +0,0 @@
-r pdf_loader_deps.txt
-r other_deps.txt

@@ -1,4 +0,0 @@
pdfminer-six>=20221105,<20240706
pymupdf>=1.22.3,<2
pypdf>=3.4.0,<5
pypdfium2>=4.10.0,<5

@@ -54,6 +54,7 @@ openapi-pydantic>=0.3.2,<0.4
oracle-ads>=2.9.1,<3
oracledb>=2.2.0,<3
pandas>=2.0.1,<3
pdfminer-six>=20221105,<20240706
pgvector>=0.1.6,<0.2
praw>=7.7.1,<8
premai>=0.3.25,<0.4

@@ -61,6 +62,9 @@ psychicapi>=0.8.0,<0.9
pydantic>=2.7.4,<3
py-trello>=0.19.0,<0.20
pyjwt>=2.8.0,<3
pymupdf>=1.22.3,<2
pypdf>=3.4.0,<5
pypdfium2>=4.10.0,<5
pyspark>=3.4.0,<4
rank-bm25>=0.2.2,<0.3
rapidfuzz>=3.1.1,<4
@@ -301,7 +301,7 @@ class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
            dumpd(self), input, name=config.get("run_name")
        )

        files = _convert_file_ids_into_attachments(kwargs.get("file_ids", []))

@@ -437,7 +437,7 @@ class OpenAIAssistantV2Runnable(OpenAIAssistantRunnable):
            inheritable_metadata=config.get("metadata"),
        )
        run_manager = callback_manager.on_chain_start(
            dumpd(self), input, name=config.get("run_name") or self.get_name()
            dumpd(self), input, name=config.get("run_name")
        )

        files = _convert_file_ids_into_attachments(kwargs.get("file_ids", []))
@@ -5,7 +5,6 @@ from __future__ import annotations
import logging
import os
import sys
import warnings
from typing import (
    TYPE_CHECKING,
    Any,

@@ -147,33 +146,6 @@ def _convert_delta_to_message_chunk(
    return default_class(content=content)  # type: ignore[call-arg]


def _update_token_usage(
    overall_token_usage: Union[int, dict], new_usage: Union[int, dict]
) -> Union[int, dict]:
    # Token usage is either ints or dictionaries
    # `reasoning_tokens` is nested inside `completion_tokens_details`
    if isinstance(new_usage, int):
        if not isinstance(overall_token_usage, int):
            raise ValueError(
                f"Got different types for token usage: "
                f"{type(new_usage)} and {type(overall_token_usage)}"
            )
        return new_usage + overall_token_usage
    elif isinstance(new_usage, dict):
        if not isinstance(overall_token_usage, dict):
            raise ValueError(
                f"Got different types for token usage: "
                f"{type(new_usage)} and {type(overall_token_usage)}"
            )
        return {
            k: _update_token_usage(overall_token_usage.get(k, 0), v)
            for k, v in new_usage.items()
        }
    else:
        warnings.warn(f"Unexpected type for token usage: {type(new_usage)}")
        return new_usage


@deprecated(
    since="0.0.10", removal="1.0", alternative_import="langchain_openai.ChatOpenAI"
)

@@ -404,9 +376,7 @@ class ChatOpenAI(BaseChatModel):
                if token_usage is not None:
                    for k, v in token_usage.items():
                        if k in overall_token_usage:
                            overall_token_usage[k] = _update_token_usage(
                                overall_token_usage[k], v
                            )
                            overall_token_usage[k] += v
                        else:
                            overall_token_usage[k] = v
                if system_fingerprint is None:
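For illustration, with hypothetical usage payloads the nested merge performed by `_update_token_usage` above behaves as follows:

# Hypothetical streamed usage payloads, shaped as in the helper above.
overall = {"completion_tokens": 10, "completion_tokens_details": {"reasoning_tokens": 2}}
new = {"completion_tokens": 5, "completion_tokens_details": {"reasoning_tokens": 3}}

# _update_token_usage(overall, new) sums the ints and recurses into nested dicts:
# -> {"completion_tokens": 15, "completion_tokens_details": {"reasoning_tokens": 5}}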
@@ -20,37 +20,13 @@ class MongodbLoader(BaseLoader):
|
||||
*,
|
||||
filter_criteria: Optional[Dict] = None,
|
||||
field_names: Optional[Sequence[str]] = None,
|
||||
metadata_names: Optional[Sequence[str]] = None,
|
||||
include_db_collection_in_metadata: bool = True,
|
||||
) -> None:
|
||||
"""
|
||||
Initializes the MongoDB loader with necessary database connection
|
||||
details and configurations.
|
||||
|
||||
Args:
|
||||
connection_string (str): MongoDB connection URI.
|
||||
db_name (str):Name of the database to connect to.
|
||||
collection_name (str): Name of the collection to fetch documents from.
|
||||
filter_criteria (Optional[Dict]): MongoDB filter criteria for querying
|
||||
documents.
|
||||
field_names (Optional[Sequence[str]]): List of field names to retrieve
|
||||
from documents.
|
||||
metadata_names (Optional[Sequence[str]]): Additional metadata fields to
|
||||
extract from documents.
|
||||
include_db_collection_in_metadata (bool): Flag to include database and
|
||||
collection names in metadata.
|
||||
|
||||
Raises:
|
||||
ImportError: If the motor library is not installed.
|
||||
ValueError: If any necessary argument is missing.
|
||||
"""
|
||||
try:
|
||||
from motor.motor_asyncio import AsyncIOMotorClient
|
||||
except ImportError as e:
|
||||
raise ImportError(
|
||||
"Cannot import from motor, please install with `pip install motor`."
|
||||
) from e
|
||||
|
||||
if not connection_string:
|
||||
raise ValueError("connection_string must be provided.")
|
||||
|
||||
@@ -63,10 +39,8 @@ class MongodbLoader(BaseLoader):
|
||||
self.client = AsyncIOMotorClient(connection_string)
|
||||
self.db_name = db_name
|
||||
self.collection_name = collection_name
|
||||
self.field_names = field_names or []
|
||||
self.field_names = field_names
|
||||
self.filter_criteria = filter_criteria or {}
|
||||
self.metadata_names = metadata_names or []
|
||||
self.include_db_collection_in_metadata = include_db_collection_in_metadata
|
||||
|
||||
self.db = self.client.get_database(db_name)
|
||||
self.collection = self.db.get_collection(collection_name)
|
||||
@@ -86,24 +60,36 @@ class MongodbLoader(BaseLoader):
|
||||
return asyncio.run(self.aload())
|
||||
|
||||
async def aload(self) -> List[Document]:
|
||||
"""Asynchronously loads data into Document objects."""
|
||||
"""Load data into Document objects."""
|
||||
result = []
|
||||
total_docs = await self.collection.count_documents(self.filter_criteria)
|
||||
|
||||
projection = self._construct_projection()
|
||||
# Construct the projection dictionary if field_names are specified
|
||||
projection = (
|
||||
{field: 1 for field in self.field_names} if self.field_names else None
|
||||
)
|
||||
|
||||
async for doc in self.collection.find(self.filter_criteria, projection):
|
||||
metadata = self._extract_fields(doc, self.metadata_names, default="")
|
||||
|
||||
# Optionally add database and collection names to metadata
|
||||
if self.include_db_collection_in_metadata:
|
||||
metadata.update(
|
||||
{"database": self.db_name, "collection": self.collection_name}
|
||||
)
|
||||
metadata = {
|
||||
"database": self.db_name,
|
||||
"collection": self.collection_name,
|
||||
}
|
||||
|
||||
# Extract text content from filtered fields or use the entire document
|
||||
if self.field_names is not None:
|
||||
fields = self._extract_fields(doc, self.field_names, default="")
|
||||
fields = {}
|
||||
for name in self.field_names:
|
||||
# Split the field names to handle nested fields
|
||||
keys = name.split(".")
|
||||
value = doc
|
||||
for key in keys:
|
||||
if key in value:
|
||||
value = value[key]
|
||||
else:
|
||||
value = ""
|
||||
break
|
||||
fields[name] = value
|
||||
|
||||
texts = [str(value) for value in fields.values()]
|
||||
text = " ".join(texts)
|
||||
else:
|
||||
@@ -118,29 +104,3 @@ class MongodbLoader(BaseLoader):
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
def _construct_projection(self) -> Optional[Dict]:
|
||||
"""Constructs the projection dictionary for MongoDB query based
|
||||
on the specified field names and metadata names."""
|
||||
field_names = list(self.field_names) or []
|
||||
metadata_names = list(self.metadata_names) or []
|
||||
all_fields = field_names + metadata_names
|
||||
return {field: 1 for field in all_fields} if all_fields else None
|
||||
|
||||
def _extract_fields(
|
||||
self,
|
||||
document: Dict,
|
||||
fields: Sequence[str],
|
||||
default: str = "",
|
||||
) -> Dict:
|
||||
"""Extracts and returns values for specified fields from a document."""
|
||||
extracted = {}
|
||||
for field in fields or []:
|
||||
value = document
|
||||
for key in field.split("."):
|
||||
value = value.get(key, default)
|
||||
if value == default:
|
||||
break
|
||||
new_field_name = field.replace(".", "_")
|
||||
extracted[new_field_name] = value
|
||||
return extracted
|
||||
|
||||
@@ -8,7 +8,7 @@ from langchain_core.utils import (
    get_pydantic_field_names,
    secret_from_env,
)
from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
from pydantic import BaseModel, ConfigDict, Field, Secret, model_validator
from typing_extensions import Self

from langchain_community.utils.openai import is_openai_v1

@@ -31,7 +31,7 @@ class DallEAPIWrapper(BaseModel):
    async_client: Any = Field(default=None, exclude=True)  #: :meta private:
    model_name: str = Field(default="dall-e-2", alias="model")
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    openai_api_key: SecretStr = Field(
    openai_api_key: Secret[str] = Field(
        alias="api_key",
        default_factory=secret_from_env(
            "OPENAI_API_KEY",
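`Secret[str]` is pydantic v2's generic secret type (available from pydantic 2.7 onward); a small usage sketch with a made-up key:

from pydantic import BaseModel, Secret


class Settings(BaseModel):
    api_key: Secret[str]


settings = Settings(api_key="sk-hypothetical")
print(settings.api_key)                     # masked repr, the value is hidden
print(settings.api_key.get_secret_value())  # sk-hypothetical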
87  libs/community/poetry.lock  generated
@@ -1262,26 +1262,22 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "importlib-metadata"
|
||||
version = "8.5.0"
|
||||
version = "8.4.0"
|
||||
description = "Read metadata from Python packages"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
|
||||
{file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
|
||||
{file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"},
|
||||
{file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
zipp = ">=3.20"
|
||||
zipp = ">=0.5"
|
||||
|
||||
[package.extras]
|
||||
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
|
||||
cover = ["pytest-cov"]
|
||||
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
enabler = ["pytest-enabler (>=2.2)"]
|
||||
perf = ["ipython"]
|
||||
test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
|
||||
type = ["pytest-mypy"]
|
||||
test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
@@ -1780,7 +1776,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.3.0"
|
||||
version = "0.3.0.dev2"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -1790,8 +1786,8 @@ develop = true
|
||||
[package.dependencies]
|
||||
aiohttp = "^3.8.3"
|
||||
async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""}
|
||||
langchain-core = "^0.3.0"
|
||||
langchain-text-splitters = "^0.3.0"
|
||||
langchain-core = "^0.3.0.dev5"
|
||||
langchain-text-splitters = "^0.3.0.dev1"
|
||||
langsmith = "^0.1.17"
|
||||
numpy = [
|
||||
{version = ">=1,<2", markers = "python_version < \"3.12\""},
|
||||
@@ -1809,7 +1805,7 @@ url = "../langchain"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.0"
|
||||
version = "0.3.0.dev5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -1820,10 +1816,7 @@ develop = true
|
||||
jsonpatch = "^1.33"
|
||||
langsmith = "^0.1.117"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = [
|
||||
{version = ">=2.5.2,<3.0.0", markers = "python_full_version < \"3.12.4\""},
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
]
|
||||
pydantic = "^2.7.4"
|
||||
PyYAML = ">=5.3"
|
||||
tenacity = "^8.1.0,!=8.4.0"
|
||||
typing-extensions = ">=4.7"
|
||||
@@ -1853,7 +1846,7 @@ url = "../standard-tests"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.3.0"
|
||||
version = "0.3.0.dev1"
|
||||
description = "LangChain text splitting utilities"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -1861,7 +1854,7 @@ files = []
|
||||
develop = true
|
||||
|
||||
[package.dependencies]
|
||||
langchain-core = "^0.3.0"
|
||||
langchain-core = "^0.3.0.dev1"
|
||||
|
||||
[package.source]
|
||||
type = "directory"
|
||||
@@ -1869,13 +1862,13 @@ url = "../text-splitters"
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.1.120"
|
||||
version = "0.1.117"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langsmith-0.1.120-py3-none-any.whl", hash = "sha256:54d2785e301646c0988e0a69ebe4d976488c87b41928b358cb153b6ddd8db62b"},
|
||||
{file = "langsmith-0.1.120.tar.gz", hash = "sha256:25499ca187b41bd89d784b272b97a8d76f60e0e21bdf20336e8a2aa6a9b23ac9"},
|
||||
{file = "langsmith-0.1.117-py3-none-any.whl", hash = "sha256:e936ee9bcf8293b0496df7ba462a3702179fbe51f9dc28744b0fbec0dbf206ae"},
|
||||
{file = "langsmith-0.1.117.tar.gz", hash = "sha256:a1b532f49968b9339bcaff9118d141846d52ed3d803f342902e7448edf1d662b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2633,22 +2626,22 @@ wcwidth = "*"
|
||||
|
||||
[[package]]
|
||||
name = "protobuf"
|
||||
version = "5.28.1"
|
||||
version = "5.28.0"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "protobuf-5.28.1-cp310-abi3-win32.whl", hash = "sha256:fc063acaf7a3d9ca13146fefb5b42ac94ab943ec6e978f543cd5637da2d57957"},
|
||||
{file = "protobuf-5.28.1-cp310-abi3-win_amd64.whl", hash = "sha256:4c7f5cb38c640919791c9f74ea80c5b82314c69a8409ea36f2599617d03989af"},
|
||||
{file = "protobuf-5.28.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4304e4fceb823d91699e924a1fdf95cde0e066f3b1c28edb665bda762ecde10f"},
|
||||
{file = "protobuf-5.28.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:0dfd86d2b5edf03d91ec2a7c15b4e950258150f14f9af5f51c17fa224ee1931f"},
|
||||
{file = "protobuf-5.28.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:51f09caab818707ab91cf09cc5c156026599cf05a4520779ccbf53c1b352fb25"},
|
||||
{file = "protobuf-5.28.1-cp38-cp38-win32.whl", hash = "sha256:1b04bde117a10ff9d906841a89ec326686c48ececeb65690f15b8cabe7149495"},
|
||||
{file = "protobuf-5.28.1-cp38-cp38-win_amd64.whl", hash = "sha256:cabfe43044ee319ad6832b2fda332646f9ef1636b0130186a3ae0a52fc264bb4"},
|
||||
{file = "protobuf-5.28.1-cp39-cp39-win32.whl", hash = "sha256:4b4b9a0562a35773ff47a3df823177ab71a1f5eb1ff56d8f842b7432ecfd7fd2"},
|
||||
{file = "protobuf-5.28.1-cp39-cp39-win_amd64.whl", hash = "sha256:f24e5d70e6af8ee9672ff605d5503491635f63d5db2fffb6472be78ba62efd8f"},
|
||||
{file = "protobuf-5.28.1-py3-none-any.whl", hash = "sha256:c529535e5c0effcf417682563719e5d8ac8d2b93de07a56108b4c2d436d7a29a"},
|
||||
{file = "protobuf-5.28.1.tar.gz", hash = "sha256:42597e938f83bb7f3e4b35f03aa45208d49ae8d5bcb4bc10b9fc825e0ab5e423"},
|
||||
{file = "protobuf-5.28.0-cp310-abi3-win32.whl", hash = "sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0"},
|
||||
{file = "protobuf-5.28.0-cp310-abi3-win_amd64.whl", hash = "sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6"},
|
||||
{file = "protobuf-5.28.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:532627e8fdd825cf8767a2d2b94d77e874d5ddb0adefb04b237f7cc296748681"},
|
||||
{file = "protobuf-5.28.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:018db9056b9d75eb93d12a9d35120f97a84d9a919bcab11ed56ad2d399d6e8dd"},
|
||||
{file = "protobuf-5.28.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:6206afcb2d90181ae8722798dcb56dc76675ab67458ac24c0dd7d75d632ac9bd"},
|
||||
{file = "protobuf-5.28.0-cp38-cp38-win32.whl", hash = "sha256:eef7a8a2f4318e2cb2dee8666d26e58eaf437c14788f3a2911d0c3da40405ae8"},
|
||||
{file = "protobuf-5.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:d001a73c8bc2bf5b5c1360d59dd7573744e163b3607fa92788b7f3d5fefbd9a5"},
|
||||
{file = "protobuf-5.28.0-cp39-cp39-win32.whl", hash = "sha256:dde9fcaa24e7a9654f4baf2a55250b13a5ea701493d904c54069776b99a8216b"},
|
||||
{file = "protobuf-5.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:853db610214e77ee817ecf0514e0d1d052dff7f63a0c157aa6eabae98db8a8de"},
|
||||
{file = "protobuf-5.28.0-py3-none-any.whl", hash = "sha256:510ed78cd0980f6d3218099e874714cdf0d8a95582e7b059b06cabad855ed0a0"},
|
||||
{file = "protobuf-5.28.0.tar.gz", hash = "sha256:dde74af0fa774fa98892209992295adbfb91da3fa98c8f67a88afe8f5a349add"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2842,13 +2835,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.5.2"
|
||||
version = "2.5.0"
|
||||
description = "Settings management using Pydantic"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"},
|
||||
{file = "pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"},
|
||||
{file = "pydantic_settings-2.5.0-py3-none-any.whl", hash = "sha256:eae04a3dd9adf521a4c959dcefb984e0f3b1d841999daf02f961dcc4d31d2f7f"},
|
||||
{file = "pydantic_settings-2.5.0.tar.gz", hash = "sha256:204828c02481a2e7135466b26a7d65d9e15a17d52d1d8f59cacdf9ad625e1140"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3048,13 +3041,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2024.2"
|
||||
version = "2024.1"
|
||||
description = "World timezone definitions, modern and historical"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
|
||||
{file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
|
||||
{file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
|
||||
{file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4040,13 +4033,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.2.3"
|
||||
version = "2.2.2"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
|
||||
{file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
|
||||
{file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
|
||||
{file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -4366,13 +4359,13 @@ multidict = ">=4.0"
|
||||
|
||||
[[package]]
|
||||
name = "zipp"
|
||||
version = "3.20.2"
|
||||
version = "3.20.1"
|
||||
description = "Backport of pathlib-compatible object wrapper for zip files"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
|
||||
{file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
|
||||
{file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"},
|
||||
{file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -4386,4 +4379,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<4.0"
|
||||
content-hash = "ee964a118892539749a10eeb2e7e8ce5570cf84faf02ea226fa2af865dc14135"
|
||||
content-hash = "1a81994350c65c891f5f592a522975bc6688cfad016f2af5fe8ad93a76209066"
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.poetry]
|
||||
name = "langchain-community"
|
||||
version = "0.3.0"
|
||||
version = "0.3.0.dev2"
|
||||
description = "Community contributed LangChain integrations."
|
||||
authors = []
|
||||
license = "MIT"
|
||||
@@ -33,8 +33,8 @@ ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogy
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.9,<4.0"
|
||||
langchain-core = "^0.3.0"
|
||||
langchain = "^0.3.0"
|
||||
langchain-core = { version = "^0.3.0.dev5", allow-prereleases = true }
|
||||
langchain = { version = "^0.3.0.dev2", allow-prereleases = true }
|
||||
SQLAlchemy = ">=1.4,<3"
|
||||
requests = "^2"
|
||||
PyYAML = ">=5.3"
|
||||
@@ -63,7 +63,6 @@ addopts = "--strict-markers --strict-config --durations=5 --snapshot-warn-unused
|
||||
markers = [
|
||||
"requires: mark tests as requiring a specific library",
|
||||
"scheduled: mark tests to run in scheduled testing",
|
||||
"runs: mark tests to run in CI",
|
||||
"compile: mark placeholder test used to compile integration tests without running them",
|
||||
]
|
||||
asyncio_mode = "auto"
|
||||
@@ -104,11 +103,7 @@ pytest-socket = "^0.6.0"
|
||||
syrupy = "^4.0.2"
|
||||
requests-mock = "^1.11.0"
|
||||
# TODO: hack to fix 3.9 builds
|
||||
cffi = [
|
||||
{ version = "<1.17.1", python = "<3.10" },
|
||||
{ version = "*", python = ">=3.10" },
|
||||
]
|
||||
|
||||
cffi = [{version = "<1.17.1", python="<3.10"}, {version = "*", python=">=3.10"}]
|
||||
|
||||
[tool.poetry.group.codespell.dependencies]
|
||||
codespell = "^2.2.0"
|
||||
@@ -120,11 +115,7 @@ vcrpy = "^6"
|
||||
[tool.poetry.group.lint.dependencies]
|
||||
ruff = "^0.5"
|
||||
# TODO: hack to fix 3.9 builds
|
||||
cffi = [
|
||||
{ version = "<1.17.1", python = "<3.10" },
|
||||
{ version = "*", python = ">=3.10" },
|
||||
]
|
||||
|
||||
cffi = [{version = "<1.17.1", python="<3.10"}, {version = "*", python=">=3.10"}]
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
jupyter = "^1.0.0"
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
from importlib import util
|
||||
import os
|
||||
from typing import Dict, Sequence
|
||||
|
||||
import pytest
|
||||
from pytest import Config, Function, Parser
|
||||
|
||||
# Getting the absolute path of the current file's directory
|
||||
import os
|
||||
|
||||
ABS_PATH = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
# Getting the absolute path of the project's root directory
|
||||
@@ -22,83 +17,3 @@ def _load_env() -> None:
|
||||
|
||||
|
||||
_load_env()
|
||||
|
||||
def pytest_addoption(parser: Parser) -> None:
|
||||
"""Add custom command line options to pytest."""
|
||||
parser.addoption(
|
||||
"--only-extended",
|
||||
action="store_true",
|
||||
help="Only run extended tests. Does not allow skipping any extended tests.",
|
||||
)
|
||||
parser.addoption(
|
||||
"--only-core",
|
||||
action="store_true",
|
||||
help="Only run core tests. Never runs any extended tests.",
|
||||
)
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(config: Config, items: Sequence[Function]) -> None:
|
||||
"""Add implementations for handling custom markers.
|
||||
|
||||
At the moment, this adds support for a custom `requires` marker.
|
||||
|
||||
The `requires` marker is used to denote tests that require one or more packages
|
||||
to be installed to run. If the package is not installed, the test is skipped.
|
||||
|
||||
The `requires` marker syntax is:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@pytest.mark.requires("package1", "package2")
|
||||
def test_something():
|
||||
...
|
||||
"""
|
||||
# Mapping from the name of a package to whether it is installed or not.
|
||||
# Used to avoid repeated calls to `util.find_spec`
|
||||
required_pkgs_info: Dict[str, bool] = {}
|
||||
|
||||
only_extended = config.getoption("--only-extended") or False
|
||||
only_core = config.getoption("--only-core") or False
|
||||
|
||||
if only_extended and only_core:
|
||||
raise ValueError("Cannot specify both `--only-extended` and `--only-core`.")
|
||||
|
||||
for item in items:
|
||||
requires_marker = item.get_closest_marker("requires")
|
||||
if requires_marker is not None:
|
||||
if only_core:
|
||||
item.add_marker(pytest.mark.skip(reason="Skipping not a core test."))
|
||||
continue
|
||||
|
||||
# Iterate through the list of required packages
|
||||
required_pkgs = requires_marker.args
|
||||
for pkg in required_pkgs:
|
||||
# If we haven't yet checked whether the pkg is installed
|
||||
# let's check it and store the result.
|
||||
if pkg not in required_pkgs_info:
|
||||
try:
|
||||
installed = util.find_spec(pkg) is not None
|
||||
except Exception:
|
||||
installed = False
|
||||
required_pkgs_info[pkg] = installed
|
||||
|
||||
if not required_pkgs_info[pkg]:
|
||||
if only_extended:
|
||||
pytest.fail(
|
||||
f"Package `{pkg}` is not installed but is required for "
|
||||
f"extended tests. Please install the given package and "
|
||||
f"try again.",
|
||||
)
|
||||
|
||||
else:
|
||||
# If the package is not installed, we immediately break
|
||||
# and mark the test as skipped.
|
||||
item.add_marker(
|
||||
pytest.mark.skip(reason=f"Requires pkg: `{pkg}`")
|
||||
)
|
||||
break
|
||||
else:
|
||||
if only_extended:
|
||||
item.add_marker(
|
||||
pytest.mark.skip(reason="Skipping not an extended test.")
|
||||
)
|
||||
|
||||
@@ -86,8 +86,6 @@ def test_pdfminer_pdf_as_html_loader() -> None:
    assert len(docs) == 1


@pytest.mark.runs
@pytest.mark.requires("pypdf")
def test_pypdf_loader() -> None:
    """Test PyPDFLoader."""
    file_path = Path(__file__).parent.parent / "examples/hello.pdf"

@@ -103,8 +101,6 @@ def test_pypdf_loader() -> None:
    assert len(docs) == 16


@pytest.mark.runs
@pytest.mark.requires("pypdf")
def test_pypdf_loader_with_layout() -> None:
    """Test PyPDFLoader with layout mode."""
    file_path = Path(__file__).parent.parent / "examples/layout-parser-paper.pdf"
@@ -121,4 +121,4 @@ def test_callback_manager_configure_context_vars(
    assert cb.completion_tokens == 1
    assert cb.total_cost > 0
    wait_for_all_tracers()
    assert LangChainTracer._persist_run_single.call_count == 4  # type: ignore
    assert LangChainTracer._persist_run_single.call_count == 1  # type: ignore
@@ -12,7 +12,6 @@ def raw_docs() -> List[Dict]:
|
||||
return [
|
||||
{"_id": "1", "address": {"building": "1", "room": "1"}},
|
||||
{"_id": "2", "address": {"building": "2", "room": "2"}},
|
||||
{"_id": "3", "address": {"building": "3", "room": "2"}},
|
||||
]
|
||||
|
||||
|
||||
@@ -20,23 +19,18 @@ def raw_docs() -> List[Dict]:
|
||||
def expected_documents() -> List[Document]:
|
||||
return [
|
||||
Document(
|
||||
page_content="{'_id': '2', 'address': {'building': '2', 'room': '2'}}",
|
||||
page_content="{'_id': '1', 'address': {'building': '1', 'room': '1'}}",
|
||||
metadata={"database": "sample_restaurants", "collection": "restaurants"},
|
||||
),
|
||||
Document(
|
||||
page_content="{'_id': '3', 'address': {'building': '3', 'room': '2'}}",
|
||||
page_content="{'_id': '2', 'address': {'building': '2', 'room': '2'}}",
|
||||
metadata={"database": "sample_restaurants", "collection": "restaurants"},
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.requires("motor")
|
||||
async def test_load_mocked_with_filters(expected_documents: List[Document]) -> None:
|
||||
filter_criteria = {"address.room": {"$eq": "2"}}
|
||||
field_names = ["address.building", "address.room"]
|
||||
metadata_names = ["_id"]
|
||||
include_db_collection_in_metadata = True
|
||||
|
||||
async def test_load_mocked(expected_documents: List[Document]) -> None:
|
||||
mock_async_load = AsyncMock()
|
||||
mock_async_load.return_value = expected_documents
|
||||
|
||||
@@ -57,13 +51,7 @@ async def test_load_mocked_with_filters(expected_documents: List[Document]) -> N
|
||||
new=mock_async_load,
|
||||
):
|
||||
loader = MongodbLoader(
|
||||
"mongodb://localhost:27017",
|
||||
"test_db",
|
||||
"test_collection",
|
||||
filter_criteria=filter_criteria,
|
||||
field_names=field_names,
|
||||
metadata_names=metadata_names,
|
||||
include_db_collection_in_metadata=include_db_collection_in_metadata,
|
||||
"mongodb://localhost:27017", "test_db", "test_collection"
|
||||
)
|
||||
loader.collection = mock_collection
|
||||
documents = await loader.aload()
|
||||
|
||||
@@ -45,6 +45,12 @@ def import_all_modules(package_name: str) -> dict:
    for importer, modname, ispkg in pkgutil.walk_packages(
        package.__path__, package.__name__ + "."
    ):
        if modname.startswith("langchain_core.pydantic_v1") or modname.startswith(
            "langchain.pydantic_v1"
        ):
            # These are deprecated and should not be imported as they
            # intentionally raise an import error
            continue
        try:
            module = importlib.import_module(modname)
        except ModuleNotFoundError:

@@ -138,8 +144,15 @@ def test_serializable_mapping() -> None:

    for k, import_path in serializable_modules.items():
        import_dir, import_obj = import_path[:-1], import_path[-1]
        module = ".".join(import_dir)
        if module.startswith("langchain_core.pydantic_v1") or module.startswith(
            "langchain.pydantic_v1"
        ):
            # These are deprecated and should not be imported as they
            # intentionally raise an import error
            continue
        # Import module
        mod = importlib.import_module(".".join(import_dir))
        mod = importlib.import_module(module)
        # Import class
        cls = getattr(mod, import_obj)
        assert list(k) == cls.lc_id()
@@ -333,26 +333,9 @@ def deprecated(
        old_doc = ""

    # Modify the docstring to include a deprecation notice.
    if (
        _alternative
        and _alternative.split(".")[-1].lower() == _alternative.split(".")[-1]
    ):
        _alternative = f":meth:`~{_alternative}`"
    elif _alternative:
        _alternative = f":class:`~{_alternative}`"

    if (
        _alternative_import
        and _alternative_import.split(".")[-1].lower()
        == _alternative_import.split(".")[-1]
    ):
        _alternative_import = f":meth:`~{_alternative_import}`"
    elif _alternative_import:
        _alternative_import = f":class:`~{_alternative_import}`"

    components = [
        _message,
        f"Use {_alternative} instead." if _alternative else "",
        f"Use ``{_alternative}`` instead." if _alternative else "",
        f"Use ``{_alternative_import}`` instead." if _alternative_import else "",
        _addendum,
    ]
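Illustratively, with the decorator usage already seen in the ChatOpenAI hunk above, the appended notice is now a plain literal rather than a Sphinx :class:/:meth: role:

from langchain_core._api import deprecated


@deprecated(
    since="0.0.10", removal="1.0", alternative_import="langchain_openai.ChatOpenAI"
)
class OldChatOpenAI:
    """Old community wrapper (illustrative)."""


# Appended docstring notice now reads:
#   Use ``langchain_openai.ChatOpenAI`` instead.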
@@ -1,7 +1,7 @@
|
||||
import importlib
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from langchain_core._api import beta
|
||||
from langchain_core.load.mapping import (
|
||||
@@ -37,9 +37,6 @@ class Reviver:
|
||||
secrets_map: Optional[Dict[str, str]] = None,
|
||||
valid_namespaces: Optional[List[str]] = None,
|
||||
secrets_from_env: bool = True,
|
||||
additional_import_mappings: Optional[
|
||||
Dict[Tuple[str, ...], Tuple[str, ...]]
|
||||
] = None,
|
||||
) -> None:
|
||||
"""Initialize the reviver.
|
||||
|
||||
@@ -51,27 +48,15 @@ class Reviver:
|
||||
to allow to be deserialized. Defaults to None.
|
||||
secrets_from_env: Whether to load secrets from the environment.
|
||||
Defaults to True.
|
||||
additional_import_mappings: A dictionary of additional namespace mappings
|
||||
You can use this to override default mappings or add new mappings.
|
||||
Defaults to None.
|
||||
"""
|
||||
self.secrets_from_env = secrets_from_env
|
||||
self.secrets_map = secrets_map or dict()
|
||||
# By default, only support langchain, but user can pass in additional namespaces
|
||||
# By default only support langchain, but user can pass in additional namespaces
|
||||
self.valid_namespaces = (
|
||||
[*DEFAULT_NAMESPACES, *valid_namespaces]
|
||||
if valid_namespaces
|
||||
else DEFAULT_NAMESPACES
|
||||
)
|
||||
self.additional_import_mappings = additional_import_mappings or dict()
|
||||
self.import_mappings = (
|
||||
{
|
||||
**ALL_SERIALIZABLE_MAPPINGS,
|
||||
**self.additional_import_mappings,
|
||||
}
|
||||
if self.additional_import_mappings
|
||||
else ALL_SERIALIZABLE_MAPPINGS
|
||||
)
|
||||
|
||||
def __call__(self, value: Dict[str, Any]) -> Any:
|
||||
if (
|
||||
@@ -112,16 +97,16 @@ class Reviver:
|
||||
raise ValueError(f"Invalid namespace: {value}")
|
||||
|
||||
# If namespace is in known namespaces, try to use mapping
|
||||
key = tuple(namespace + [name])
|
||||
if namespace[0] in DEFAULT_NAMESPACES:
|
||||
# Get the importable path
|
||||
if key not in self.import_mappings:
|
||||
key = tuple(namespace + [name])
|
||||
if key not in ALL_SERIALIZABLE_MAPPINGS:
|
||||
raise ValueError(
|
||||
"Trying to deserialize something that cannot "
|
||||
"be deserialized in current version of langchain-core: "
|
||||
f"{key}"
|
||||
)
|
||||
import_path = self.import_mappings[key]
|
||||
import_path = ALL_SERIALIZABLE_MAPPINGS[key]
|
||||
# Split into module and name
|
||||
import_dir, import_obj = import_path[:-1], import_path[-1]
|
||||
# Import module
|
||||
@@ -130,12 +115,7 @@ class Reviver:
|
||||
cls = getattr(mod, import_obj)
|
||||
# Otherwise, load by path
|
||||
else:
|
||||
if key in self.additional_import_mappings:
|
||||
import_path = self.import_mappings[key]
|
||||
mod = importlib.import_module(".".join(import_path[:-1]))
|
||||
name = import_path[-1]
|
||||
else:
|
||||
mod = importlib.import_module(".".join(namespace))
|
||||
mod = importlib.import_module(".".join(namespace))
|
||||
cls = getattr(mod, name)
|
||||
|
||||
# The class must be a subclass of Serializable.
|
||||
@@ -157,7 +137,6 @@ def loads(
|
||||
secrets_map: Optional[Dict[str, str]] = None,
|
||||
valid_namespaces: Optional[List[str]] = None,
|
||||
secrets_from_env: bool = True,
|
||||
additional_import_mappings: Optional[Dict[Tuple[str, ...], Tuple[str, ...]]] = None,
|
||||
) -> Any:
|
||||
"""Revive a LangChain class from a JSON string.
|
||||
Equivalent to `load(json.loads(text))`.
|
||||
@@ -171,18 +150,12 @@ def loads(
|
||||
to allow to be deserialized. Defaults to None.
|
||||
secrets_from_env: Whether to load secrets from the environment.
|
||||
Defaults to True.
|
||||
additional_import_mappings: A dictionary of additional namespace mappings
|
||||
You can use this to override default mappings or add new mappings.
|
||||
Defaults to None.
|
||||
|
||||
Returns:
|
||||
Revived LangChain objects.
|
||||
"""
|
||||
return json.loads(
|
||||
text,
|
||||
object_hook=Reviver(
|
||||
secrets_map, valid_namespaces, secrets_from_env, additional_import_mappings
|
||||
),
|
||||
text, object_hook=Reviver(secrets_map, valid_namespaces, secrets_from_env)
|
||||
)
|
||||
|
||||
|
||||
@@ -193,7 +166,6 @@ def load(
|
||||
secrets_map: Optional[Dict[str, str]] = None,
|
||||
valid_namespaces: Optional[List[str]] = None,
|
||||
secrets_from_env: bool = True,
|
||||
additional_import_mappings: Optional[Dict[Tuple[str, ...], Tuple[str, ...]]] = None,
|
||||
) -> Any:
|
||||
"""Revive a LangChain class from a JSON object. Use this if you already
|
||||
have a parsed JSON object, eg. from `json.load` or `orjson.loads`.
|
||||
@@ -207,16 +179,11 @@ def load(
|
||||
to allow to be deserialized. Defaults to None.
|
||||
secrets_from_env: Whether to load secrets from the environment.
|
||||
Defaults to True.
|
||||
additional_import_mappings: A dictionary of additional namespace mappings
|
||||
You can use this to override default mappings or add new mappings.
|
||||
Defaults to None.
|
||||
|
||||
Returns:
|
||||
Revived LangChain objects.
|
||||
"""
|
||||
reviver = Reviver(
|
||||
secrets_map, valid_namespaces, secrets_from_env, additional_import_mappings
|
||||
)
|
||||
reviver = Reviver(secrets_map, valid_namespaces, secrets_from_env)
|
||||
|
||||
def _load(obj: Any) -> Any:
|
||||
if isinstance(obj, dict):
|
||||
|
||||
@@ -1,8 +1,10 @@
from typing import Any, List, Literal

from langchain_core._api import beta
from langchain_core.messages.base import BaseMessage


@beta()
class RemoveMessage(BaseMessage):
    """Message responsible for deleting other messages."""
@@ -1,43 +1,14 @@
from importlib import metadata

from langchain_core._api.deprecation import warn_deprecated

## Create namespaces for pydantic v1 and v2.
# This code must stay at the top of the file before other modules may
# attempt to import pydantic since it adds pydantic_v1 and pydantic_v2 to sys.modules.
#
# This hack is done for the following reasons:
# * Langchain will attempt to remain compatible with both pydantic v1 and v2 since
#   both dependencies and dependents may be stuck on either version of v1 or v2.
# * Creating namespaces for pydantic v1 and v2 should allow us to write code that
#   unambiguously uses either v1 or v2 API.
# * This change is easier to roll out and roll back.

try:
    from pydantic.v1 import *  # noqa: F403
except ImportError:
    from pydantic import *  # type: ignore # noqa: F403


try:
    _PYDANTIC_MAJOR_VERSION: int = int(metadata.version("pydantic").split(".")[0])
except metadata.PackageNotFoundError:
    _PYDANTIC_MAJOR_VERSION = 0

warn_deprecated(
    "0.3.0",
    removal="1.0.0",
    alternative="pydantic.v1 or pydantic",
    message=(
        "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. "
        "The langchain_core.pydantic_v1 module was a "
        "compatibility shim for pydantic v1, and should no longer be used. "
        "Please update the code to import from Pydantic directly.\n\n"
def _raise_import_error() -> None:
    """Raise ImportError with a helpful message."""
    raise ImportError(
        "Please do not import from langchain_core.pydantic_v1. "
        "This module was a compatibility shim for pydantic v1, and should "
        "no longer be used "
        "Please update the code to import from Pydantic directly. "
        "For example, replace imports like: "
        "`from langchain_core.pydantic_v1 import BaseModel`\n"
        "with: `from pydantic import BaseModel`\n"
        "or the v1 compatibility namespace if you are working in a code base "
        "that has not been fully upgraded to pydantic 2 yet. "
        "\tfrom pydantic.v1 import BaseModel\n"
    ),
)
    )


_raise_import_error()
@@ -1,24 +1,14 @@
|
||||
from langchain_core._api import warn_deprecated
|
||||
|
||||
try:
|
||||
from pydantic.v1.dataclasses import * # noqa: F403
|
||||
except ImportError:
|
||||
from pydantic.dataclasses import * # type: ignore # noqa: F403
|
||||
|
||||
warn_deprecated(
|
||||
"0.3.0",
|
||||
removal="1.0.0",
|
||||
alternative="pydantic.v1 or pydantic",
|
||||
message=(
|
||||
"As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. "
|
||||
"The langchain_core.pydantic_v1 module was a "
|
||||
"compatibility shim for pydantic v1, and should no longer be used. "
|
||||
"Please update the code to import from Pydantic directly.\n\n"
|
||||
def _raise_import_error() -> None:
|
||||
"""Raise ImportError with a helpful message."""
|
||||
raise ImportError(
|
||||
"Please do not import from langchain_core.pydantic_v1. "
|
||||
"This module was a compatibility shim for pydantic v1, and should "
|
||||
"no longer be used "
|
||||
"Please update the code to import from Pydantic directly. "
|
||||
"For example, replace imports like: "
|
||||
"`from langchain_core.pydantic_v1 import BaseModel`\n"
|
||||
"with: `from pydantic import BaseModel`\n"
|
||||
"or the v1 compatibility namespace if you are working in a code base "
|
||||
"that has not been fully upgraded to pydantic 2 yet. "
|
||||
"\tfrom pydantic.v1 import BaseModel\n"
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
_raise_import_error()
|
||||
|
||||
@@ -1,24 +1,14 @@
|
||||
from langchain_core._api import warn_deprecated
|
||||
|
||||
try:
|
||||
from pydantic.v1.main import * # noqa: F403
|
||||
except ImportError:
|
||||
from pydantic.main import * # type: ignore # noqa: F403
|
||||
|
||||
warn_deprecated(
|
||||
"0.3.0",
|
||||
removal="1.0.0",
|
||||
alternative="pydantic.v1 or pydantic",
|
||||
message=(
|
||||
"As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. "
|
||||
"The langchain_core.pydantic_v1 module was a "
|
||||
"compatibility shim for pydantic v1, and should no longer be used. "
|
||||
"Please update the code to import from Pydantic directly.\n\n"
|
||||
def _raise_import_error() -> None:
|
||||
"""Raise ImportError with a helpful message."""
|
||||
raise ImportError(
|
||||
"Please do not import from langchain_core.pydantic_v1. "
|
||||
"This module was a compatibility shim for pydantic v1, and should "
|
||||
"no longer be used "
|
||||
"Please update the code to import from Pydantic directly. "
|
||||
"For example, replace imports like: "
|
||||
"`from langchain_core.pydantic_v1 import BaseModel`\n"
|
||||
"with: `from pydantic import BaseModel`\n"
|
||||
"or the v1 compatibility namespace if you are working in a code base "
|
||||
"that has not been fully upgraded to pydantic 2 yet. "
|
||||
"\tfrom pydantic.v1 import BaseModel\n"
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
_raise_import_error()
|
||||
|
||||
@@ -25,7 +25,6 @@ from typing import (
    List,
    Mapping,
    Optional,
    Protocol,
    Sequence,
    Set,
    Tuple,

@@ -5740,36 +5739,12 @@ class RunnableBinding(RunnableBindingBase[Input, Output]):
        return attr


class _RunnableCallableSync(Protocol[Input, Output]):
    def __call__(self, __in: Input, *, config: RunnableConfig) -> Output: ...


class _RunnableCallableAsync(Protocol[Input, Output]):
    def __call__(self, __in: Input, *, config: RunnableConfig) -> Awaitable[Output]: ...


class _RunnableCallableIterator(Protocol[Input, Output]):
    def __call__(
        self, __in: Iterator[Input], *, config: RunnableConfig
    ) -> Iterator[Output]: ...


class _RunnableCallableAsyncIterator(Protocol[Input, Output]):
    def __call__(
        self, __in: AsyncIterator[Input], *, config: RunnableConfig
    ) -> AsyncIterator[Output]: ...


RunnableLike = Union[
    Runnable[Input, Output],
    Callable[[Input], Output],
    Callable[[Input], Awaitable[Output]],
    Callable[[Iterator[Input]], Iterator[Output]],
    Callable[[AsyncIterator[Input]], AsyncIterator[Output]],
    _RunnableCallableSync[Input, Output],
    _RunnableCallableAsync[Input, Output],
    _RunnableCallableIterator[Input, Output],
    _RunnableCallableAsyncIterator[Input, Output],
    Mapping[str, Any],
]
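The protocol classes in this hunk describe config-aware callables. For orientation only, a sketch of such a callable wrapped in RunnableLambda (which passes config to functions that declare it):

from langchain_core.runnables import RunnableConfig, RunnableLambda


def shout(text: str, *, config: RunnableConfig) -> str:
    # Matches the shape described by _RunnableCallableSync[Input, Output].
    return text.upper()


runnable = RunnableLambda(shout)
print(runnable.invoke("hello"))  # HELLO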
@@ -928,7 +928,7 @@ def _is_message_content_block(obj: Any) -> bool:

def _stringify(content: Any) -> str:
    try:
        return json.dumps(content, ensure_ascii=False)
        return json.dumps(content)
    except Exception:
        return str(content)
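The difference is only visible for non-ASCII content, e.g.:

import json

print(json.dumps({"text": "héllo"}))                      # {"text": "h\u00e9llo"}
print(json.dumps({"text": "héllo"}, ensure_ascii=False))  # {"text": "héllo"}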