diff --git a/.github/scripts/prep_api_docs_build.py b/.github/scripts/prep_api_docs_build.py index 460a24d1fcf..b3df069dba8 100644 --- a/.github/scripts/prep_api_docs_build.py +++ b/.github/scripts/prep_api_docs_build.py @@ -1,5 +1,8 @@ #!/usr/bin/env python -"""Sync libraries from various repositories into this monorepo.""" +"""Sync libraries from various repositories into this monorepo. + +Moves cloned partner packages into libs/partners structure. +""" import os import shutil @@ -10,7 +13,7 @@ import yaml def load_packages_yaml() -> Dict[str, Any]: - """Load and parse the packages.yml file.""" + """Load and parse packages.yml.""" with open("langchain/libs/packages.yml", "r") as f: return yaml.safe_load(f) @@ -61,12 +64,15 @@ def move_libraries(packages: list) -> None: def main(): - """Main function to orchestrate the library sync process.""" + """Orchestrate the library sync process.""" try: # Load packages configuration package_yaml = load_packages_yaml() - # Clean target directories + # Clean/empty target directories in preparation for moving new ones + # + # Only for packages in the langchain-ai org or explicitly included via + # include_in_api_ref, excluding 'langchain' itself and 'langchain-ai21' clean_target_directories( [ p @@ -80,7 +86,9 @@ def main(): ] ) - # Move libraries to their new locations + # Move cloned libraries to their new locations, only for packages in the + # langchain-ai org or explicitly included via include_in_api_ref, + # excluding 'langchain' itself and 'langchain-ai21' move_libraries( [ p @@ -95,7 +103,7 @@ def main(): ] ) - # Delete ones without a pyproject.toml + # Delete partner packages without a pyproject.toml for partner in Path("langchain/libs/partners").iterdir(): if partner.is_dir() and not (partner / "pyproject.toml").exists(): print(f"Removing {partner} as it does not have a pyproject.toml") diff --git a/.github/workflows/api_doc_build.yml b/.github/workflows/api_doc_build.yml index f7c2c62792b..e170c1eaa44 100644 --- 
a/.github/workflows/api_doc_build.yml +++ b/.github/workflows/api_doc_build.yml @@ -1,6 +1,11 @@ # Build the API reference documentation. # # Runs daily. Can also be triggered manually for immediate updates. +# +# Built HTML pushed to langchain-ai/langchain-api-docs-html. +# +# Looks for langchain-ai org repos in packages.yml and checks them out. +# Calls prep_api_docs_build.py. name: '๐Ÿ“š API Docs' run-name: 'Build & Deploy API Reference' @@ -34,6 +39,8 @@ jobs: uses: mikefarah/yq@master with: cmd: | + # Extract repos from packages.yml that are in the langchain-ai org + # (excluding 'langchain' itself) yq ' .packages[] | select( @@ -80,24 +87,31 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - - name: '๐Ÿ“ฆ Install Initial Python Dependencies' + - name: '๐Ÿ“ฆ Install Initial Python Dependencies using uv' working-directory: langchain run: | python -m pip install -U uv python -m uv pip install --upgrade --no-cache-dir pip setuptools pyyaml - name: '๐Ÿ“ฆ Organize Library Directories' + # Places cloned partner packages into libs/partners structure run: python langchain/.github/scripts/prep_api_docs_build.py - - name: '๐Ÿงน Remove Old HTML Files' + - name: '๐Ÿงน Clear Prior Build' run: + # Remove artifacts from prior docs build rm -rf langchain-api-docs-html/api_reference_build/html - - name: '๐Ÿ“ฆ Install Documentation Dependencies' + - name: '๐Ÿ“ฆ Install Documentation Dependencies using uv' working-directory: langchain run: | + # Install all partner packages in editable mode with overrides python -m uv pip install $(ls ./libs/partners | xargs -I {} echo "./libs/partners/{}") --overrides ./docs/vercel_overrides.txt + + # Install core langchain and other main packages python -m uv pip install libs/core libs/langchain libs/text-splitters libs/community libs/experimental libs/standard-tests + + # Install Sphinx and related packages for building docs python -m uv pip install -r docs/api_reference/requirements.txt - name: '๐Ÿ”ง Configure Git Settings' @@ 
-109,14 +123,29 @@ jobs: - name: '📚 Build API Documentation' working-directory: langchain run: | + # Generate the API reference RST files python docs/api_reference/create_api_rst.py + + # Build the HTML documentation using Sphinx + # -T: show full traceback on exception + # -E: don't use cached environment (force rebuild, ignore cached doctrees) + # -b html: build HTML docs (vs PDF, etc.) + # -d: path for the cached environment (parsed document trees / doctrees) + # - Separate from output dir for faster incremental builds + # -c: path to conf.py + # -j auto: parallel build using all available CPU cores python -m sphinx -T -E -b html -d ../langchain-api-docs-html/_build/doctrees -c docs/api_reference docs/api_reference ../langchain-api-docs-html/api_reference_build/html -j auto + + # Post-process the generated HTML python docs/api_reference/scripts/custom_formatter.py ../langchain-api-docs-html/api_reference_build/html + # Default index page is blank so we copy in the actual home page. cp ../langchain-api-docs-html/api_reference_build/html/{reference,index}.html + + # Removes Sphinx's intermediate build artifacts after the build is complete. rm -rf ../langchain-api-docs-html/_build/ - # https://github.com/marketplace/actions/add-commit + # Commit and push changes to langchain-api-docs-html repo - uses: EndBug/add-and-commit@v9 with: cwd: langchain-api-docs-html diff --git a/docs/README.md b/docs/README.md index 7d99bcbf194..d2eb0cd48db 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,3 +1,159 @@ # LangChain Documentation -For more information on contributing to our documentation, see the [Documentation Contributing Guide](https://python.langchain.com/docs/contributing/how_to/documentation) +For more information on contributing to our documentation, see the [Documentation Contributing Guide](https://python.langchain.com/docs/contributing/how_to/documentation). + +## Structure + +The primary documentation is located in the `docs/` directory. 
This directory contains +both the source files for the main documentation as well as the API reference doc +build process. + +### API Reference + +API reference documentation is located in `docs/api_reference/` and is generated from +the codebase using Sphinx. + +The API reference has additional build steps that differ from the main documentation. + +#### Deployment Process + +Currently, the build process roughly follows these steps: + +1. Using the `api_doc_build.yml` GitHub workflow, the API reference docs are + [built](#build-technical-details) and copied to the `langchain-api-docs-html` + repository. This workflow is triggered either (1) on a cron routine interval or (2) + manually. + + In short, the workflow extracts all `langchain-ai`-org-owned repos defined in + `langchain/libs/packages.yml`, clones them locally (in the workflow runner's file + system), and then builds the API reference RST files (using `create_api_rst.py`). + Following post-processing, the HTML files are pushed to the + `langchain-api-docs-html` repository. +2. After the HTML files are in the `langchain-api-docs-html` repository, they are **not** + automatically published to the [live docs site](https://python.langchain.com/api_reference/). + + The docs site is served by Vercel. The Vercel deployment process copies the HTML + files from the `langchain-api-docs-html` repository and deploys them to the live + site. Deployments are triggered on each new commit pushed to `master`. + +#### Build Technical Details + +The build process creates a virtual monorepo by syncing multiple repositories, then generates comprehensive API documentation: + +1. **Repository Sync Phase:** + - `.github/scripts/prep_api_docs_build.py` - Clones external partner repos and organizes them into the `libs/partners/` structure to create a virtual monorepo for documentation building + +2. 
**RST Generation Phase:** + - `docs/api_reference/create_api_rst.py` - Main script that **generates RST files** from Python source code + - Scans `libs/` directories and extracts classes/functions from each module (using `inspect`) + - Creates `.rst` files using specialized templates for different object types + - Templates in `docs/api_reference/templates/` (`pydantic.rst`, `runnable_pydantic.rst`, etc.) + +3. **HTML Build Phase:** + - Sphinx-based, uses `sphinx.ext.autodoc` (auto-extracts docstrings from the codebase) + - `docs/api_reference/conf.py` (sphinx config) configures `autodoc` and other extensions + - `sphinx-build` processes the generated `.rst` files into HTML using autodoc + - `docs/api_reference/scripts/custom_formatter.py` - Post-processes the generated HTML + - Copies `reference.html` to `index.html` to create the default landing page (artifact? might not need to do this - just put everything in index directly?) + +4. **Deployment:** + - `.github/workflows/api_doc_build.yml` - Workflow responsible for orchestrating the entire build and deployment process + - Built HTML files are committed and pushed to the `langchain-api-docs-html` repository + +#### Local Build + +For local development and testing of API documentation, use the Makefile targets in the repository root: + +```bash +# Full build +make api_docs_build +``` + +Like the CI process, this target: + +- Installs the CLI package in editable mode +- Generates RST files for all packages using `create_api_rst.py` +- Builds HTML documentation with Sphinx +- Post-processes the HTML with `custom_formatter.py` +- Opens the built documentation (`reference.html`) in your browser + +**Quick Preview:** + +```bash +make api_docs_quick_preview API_PKG=openai +``` + +- Generates RST files for only the specified package (default: `text-splitters`) +- Builds and post-processes HTML documentation +- Opens the preview in your browser + +Both targets automatically clean previous builds and handle the complete 
build pipeline locally, mirroring the CI process but for faster iteration during development. + +#### Documentation Standards + +**Docstring Format:** +The API reference uses **Google-style docstrings** with reStructuredText markup. Sphinx processes these through the `sphinx.ext.napoleon` extension to generate documentation. + +**Required format:** + +```python +def example_function(param1: str, param2: int = 5) -> bool: + """Brief description of the function. + + Longer description can go here. Use reStructuredText syntax for + rich formatting like **bold** and *italic*. + + TODO: code: figure out what works? + + Args: + param1: Description of the first parameter. + param2: Description of the second parameter with default value. + + Returns: + Description of the return value. + + Raises: + ValueError: When param1 is empty. + TypeError: When param2 is not an integer. + + .. warning:: + This function is experimental and may change. + + .. deprecated:: 0.2.0 + Use :func:`new_function` instead. + """ +``` + +**Special Markers:** + +- `:private:` in docstrings excludes members from documentation +- `.. deprecated::` creates deprecation notices +- `.. warning::` adds warning admonitions +- `.. 
beta::` (custom directive) marks beta features + +#### Site Styling and Assets + +**Theme and Styling:** + +- Uses [**PyData Sphinx Theme**](https://pydata-sphinx-theme.readthedocs.io/en/stable/index.html) (`pydata_sphinx_theme`) +- Custom CSS in `docs/api_reference/_static/css/custom.css` with LangChain-specific: + - Color palette + - Inter font family + - Custom navbar height and sidebar formatting + - Deprecated/beta feature styling + +**Static Assets:** + +- Logos: `_static/wordmark-api.svg` (light) and `_static/wordmark-api-dark.svg` (dark mode) +- Favicon: `_static/img/brand/favicon.png` +- Custom CSS: `_static/css/custom.css` + +**Post-Processing:** + +- `scripts/custom_formatter.py` cleans up generated HTML: + - Shortens TOC entries from `ClassName.method()` to `method()` + +**Analytics and Integration:** + +- GitHub integration (source links, edit buttons) +- Example backlinking through custom `ExampleLinksDirective` diff --git a/docs/api_reference/conf.py b/docs/api_reference/conf.py index 1c50d89dac9..995c2022343 100644 --- a/docs/api_reference/conf.py +++ b/docs/api_reference/conf.py @@ -1,7 +1,5 @@ """Configuration file for the Sphinx documentation builder.""" -# Configuration file for the Sphinx documentation builder. -# # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html @@ -20,16 +18,18 @@ from docutils.parsers.rst.directives.admonitions import BaseAdmonition from docutils.statemachine import StringList from sphinx.util.docutils import SphinxDirective -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
- +# Add paths to Python import system so Sphinx can import LangChain modules +# This allows autodoc to introspect and document the actual code _DIR = Path(__file__).parent.absolute() -sys.path.insert(0, os.path.abspath(".")) -sys.path.insert(0, os.path.abspath("../../libs/langchain")) +sys.path.insert(0, os.path.abspath(".")) # Current directory +sys.path.insert(0, os.path.abspath("../../libs/langchain")) # LangChain main package +# Load package metadata from pyproject.toml (for version info, etc.) with (_DIR.parents[1] / "libs" / "langchain" / "pyproject.toml").open("r") as f: data = toml.load(f) + +# Load mapping of classes to example notebooks for backlinking +# This file is generated by scripts that scan our tutorial/example notebooks with (_DIR / "guide_imports.json").open("r") as f: imported_classes = json.load(f) @@ -86,6 +86,7 @@ class Beta(BaseAdmonition): def setup(app): + """Register custom directives and hooks with Sphinx.""" app.add_directive("example_links", ExampleLinksDirective) app.add_directive("beta", Beta) app.connect("autodoc-skip-member", skip_private_members) @@ -258,7 +259,20 @@ html_static_path = ["_static"] html_css_files = ["css/custom.css"] html_use_index = False -myst_enable_extensions = ["colon_fence"] +myst_enable_extensions = [ + "colon_fence", # ::: directive blocks (existing prior to LangGraph support) + # LangGraph compatibility extensions added for consolidation + # TODO: check for presence of each in LangGraph and only enable if needed + # "deflist", # Definition lists + # "tasklist", # - [ ] checkboxes (common in examples) + # "attrs_inline", # {.class} inline attributes (MkDocs style) + # "attrs_block", # Block-level attributes + # "substitution", # Variable substitution + # "linkify", # Auto-link URLs in text + # Math extensions (uncomment if LangGraph uses mathematical notation) + # "dollarmath", # $ math $ inline math + # "amsmath", # Advanced math environments +] # generate autosummary even if no references 
autosummary_generate = True @@ -268,11 +282,11 @@ autosummary_ignore_module_all = False html_copy_source = False html_show_sourcelink = False +googleanalytics_id = "G-9B66JQQH2F" + # Set canonical URL from the Read the Docs Domain html_baseurl = os.environ.get("READTHEDOCS_CANONICAL_URL", "") -googleanalytics_id = "G-9B66JQQH2F" - # Tell Jinja2 templates the build is running on Read the Docs if os.environ.get("READTHEDOCS", "") == "True": html_context["READTHEDOCS"] = True diff --git a/docs/api_reference/create_api_rst.py b/docs/api_reference/create_api_rst.py index ab48684f63c..dcab7969504 100644 --- a/docs/api_reference/create_api_rst.py +++ b/docs/api_reference/create_api_rst.py @@ -1,4 +1,41 @@ -"""Script for auto-generating api_reference.rst.""" +"""Auto-generate API reference documentation (RST files) for LangChain packages. + +* Automatically discovers all packages in `libs/` and `libs/partners/` +* For each package, recursively walks the filesystem to: + * Load Python modules using importlib + * Extract classes and functions using Python's inspect module + * Classify objects by type (Pydantic models, Runnables, TypedDicts, etc.) + * Filter out private members (names starting with '_') and deprecated items +* Creates structured RST files with: + * Module-level documentation pages with autosummary tables + * Different Sphinx templates based on object type (see templates/ directory) + * Proper cross-references and navigation structure + * Separation of current vs deprecated APIs +* Generates a directory tree like: + ``` + docs/api_reference/ + โ”œโ”€โ”€ index.md # Main landing page with package gallery + โ”œโ”€โ”€ reference.md # Package overview and navigation + โ”œโ”€โ”€ core/ # langchain-core documentation + โ”‚ โ”œโ”€โ”€ index.rst + โ”‚ โ”œโ”€โ”€ callbacks.rst + โ”‚ โ””โ”€โ”€ ... + โ”œโ”€โ”€ langchain/ # langchain documentation + โ”‚ โ”œโ”€โ”€ index.rst + โ”‚ โ””โ”€โ”€ ... 
+ โ””โ”€โ”€ partners/ # Integration packages + โ”œโ”€โ”€ openai/ + โ”œโ”€โ”€ anthropic/ + โ””โ”€โ”€ ... + ``` + +## Key Features + +* Respects privacy markers: + * Modules with `:private:` in docstring are excluded entirely + * Objects with `:private:` in docstring are filtered out + * Names starting with '_' are treated as private +""" import importlib import inspect @@ -178,11 +215,12 @@ def _load_package_modules( of the modules/packages are part of the package vs. 3rd party or built-in. Parameters: - package_directory (Union[str, Path]): Path to the package directory. - submodule (Optional[str]): Optional name of submodule to load. + package_directory: Path to the package directory. + submodule: Optional name of submodule to load. Returns: - Dict[str, ModuleMembers]: A dictionary where keys are module names and values are ModuleMembers objects. + A dictionary where keys are module names and values are `ModuleMembers` + objects. """ package_path = ( Path(package_directory) @@ -199,12 +237,13 @@ def _load_package_modules( package_path = package_path / submodule for file_path in package_path.rglob("*.py"): + # Skip private modules if file_path.name.startswith("_"): continue + # Skip integration_template and project_template directories (for libs/cli) if "integration_template" in file_path.parts: continue - if "project_template" in file_path.parts: continue @@ -215,8 +254,13 @@ def _load_package_modules( continue # Get the full namespace of the module + # Example: langchain_core/schema/output_parsers.py -> + # langchain_core.schema.output_parsers namespace = str(relative_module_name).replace(".py", "").replace("/", ".") + # Keep only the top level namespace + # Example: langchain_core.schema.output_parsers -> + # langchain_core top_namespace = namespace.split(".")[0] try: @@ -253,16 +297,16 @@ def _construct_doc( members_by_namespace: Dict[str, ModuleMembers], package_version: str, ) -> List[typing.Tuple[str, str]]: - """Construct the contents of the reference.rst 
file for the given package. + """Construct the contents of the `reference.rst` for the given package. Args: package_namespace: The package top level namespace - members_by_namespace: The members of the package, dict organized by top level - module contains a list of classes and functions - inside of the top level namespace. + members_by_namespace: The members of the package dict organized by top level. + Module contains a list of classes and functions inside of the top level + namespace. Returns: - The contents of the reference.rst file. + The string contents of the reference.rst file. """ docs = [] index_doc = f"""\ @@ -465,10 +509,13 @@ def _construct_doc( def _build_rst_file(package_name: str = "langchain") -> None: - """Create a rst file for building of documentation. + """Create a rst file for a given package. Args: - package_name: Can be either "langchain" or "core" + package_name: Name of the package to create the rst file for. + + Returns: + The rst file is created in the same directory as this script. """ package_dir = _package_dir(package_name) package_members = _load_package_modules(package_dir) @@ -500,7 +547,10 @@ def _package_namespace(package_name: str) -> str: def _package_dir(package_name: str = "langchain") -> Path: - """Return the path to the directory containing the documentation.""" + """Return the path to the directory containing the documentation. + + Attempts to find the package in `libs/` first, then `libs/partners/`. 
+ """ if (ROOT_DIR / "libs" / package_name).exists(): return ROOT_DIR / "libs" / package_name / _package_namespace(package_name) else: @@ -514,7 +564,7 @@ def _package_dir(package_name: str = "langchain") -> Path: def _get_package_version(package_dir: Path) -> str: - """Return the version of the package.""" + """Return the version of the package by reading the `pyproject.toml`.""" try: with open(package_dir.parent / "pyproject.toml", "r") as f: pyproject = toml.load(f) @@ -540,6 +590,15 @@ def _out_file_path(package_name: str) -> Path: def _build_index(dirs: List[str]) -> None: + """Build the index.md file for the API reference. + + Args: + dirs: List of package directories to include in the index. + + Returns: + The index.md file is created in the same directory as this script. + """ + custom_names = { "aws": "AWS", "ai21": "AI21", @@ -647,9 +706,14 @@ See the full list of integrations in the Section Navigation. {integration_tree} ``` """ + # Write the reference.md file with open(HERE / "reference.md", "w") as f: f.write(doc) + # Write a dummy index.md file that points to reference.md + # Sphinx requires an index file to exist in each doc directory + # TODO: investigate why we don't just put everything in index.md directly? + # if it works it works I guess dummy_index = """\ # API reference @@ -665,8 +729,11 @@ Reference def main(dirs: Optional[list] = None) -> None: - """Generate the api_reference.rst file for each package.""" - print("Starting to build API reference files.") + """Generate the `api_reference.rst` file for each package. + + If dirs is None, generate for all packages in `libs/` and `libs/partners/`. + Otherwise generate only for the specified package(s). 
+ """ if not dirs: dirs = [ p.parent.name @@ -675,18 +742,17 @@ def main(dirs: Optional[list] = None) -> None: if p.parent.parent.name in ("libs", "partners") ] for dir_ in sorted(dirs): - # Skip any hidden directories + # Skip any hidden directories prefixed with a dot # Some of these could be present by mistake in the code base - # e.g., .pytest_cache from running tests from the wrong location. + # (e.g., .pytest_cache from running tests from the wrong location) if dir_.startswith("."): print("Skipping dir:", dir_) continue else: - print("Building package:", dir_) + print("Building:", dir_) _build_rst_file(package_name=dir_) _build_index(sorted(dirs)) - print("API reference files built.") if __name__ == "__main__": diff --git a/docs/api_reference/requirements.txt b/docs/api_reference/requirements.txt index b59b8d43722..2de64dba703 100644 --- a/docs/api_reference/requirements.txt +++ b/docs/api_reference/requirements.txt @@ -1,12 +1,12 @@ autodoc_pydantic>=2,<3 sphinx>=8,<9 -myst-parser>=3 sphinx-autobuild>=2024 +sphinx-design +sphinx-copybutton +sphinxcontrib-googleanalytics pydata-sphinx-theme>=0.15 +myst-parser>=3 toml>=0.10.2 myst-nb>=1.1.1 pyyaml -sphinx-design -sphinx-copybutton beautifulsoup4 -sphinxcontrib-googleanalytics diff --git a/docs/api_reference/scripts/custom_formatter.py b/docs/api_reference/scripts/custom_formatter.py index b74231eec98..d62af6b2b8b 100644 --- a/docs/api_reference/scripts/custom_formatter.py +++ b/docs/api_reference/scripts/custom_formatter.py @@ -1,3 +1,10 @@ +"""Post-process generated HTML files to clean up table-of-contents headers. + +Runs after Sphinx generates the API reference HTML. It finds TOC entries like +"ClassName.method_name()" and shortens them to just "method_name()" for better +readability in the sidebar navigation. 
+""" + import sys from glob import glob from pathlib import Path diff --git a/pyproject.toml b/pyproject.toml index 1a408d27676..8f51a229ee4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,6 +54,7 @@ test = [ ] docs = [ "toml>=0.10.2,<0.11.0", + "types-toml>=0.10.8.20240310", "autodoc-pydantic>=2.0.0,<3.0.0", "sphinx>=6.0.0,<8.0.0", "myst-parser>=3.0.0,<4.0.0", @@ -65,6 +66,8 @@ docs = [ "sphinx-copybutton", "beautifulsoup4", "sphinxcontrib-googleanalytics", + "docutils>=0.21.2,<1.0.0", + "types-docutils>=0.22.1.20250923", ] [tool.uv.sources] diff --git a/uv.lock b/uv.lock index 82a5dbae26c..1cd5837a672 100644 --- a/uv.lock +++ b/uv.lock @@ -1476,8 +1476,8 @@ name = "html5lib" version = "1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "six" }, - { name = "webencodings" }, + { name = "six", marker = "python_full_version < '3.13'" }, + { name = "webencodings", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215, upload-time = "2020-06-22T23:32:38.834Z" } wheels = [ @@ -2364,6 +2364,7 @@ test = [ { name = "onnxruntime", marker = "python_full_version >= '3.10'" }, { name = "pytest", specifier = ">=7.3.0,<8.0.0" }, { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" }, + { name = "pytest-benchmark" }, { name = "pytest-mock", specifier = ">=3.10.0,<4.0.0" }, { name = "pytest-socket", specifier = ">=0.7.0,<1.0.0" }, { name = "pytest-watcher", specifier = ">=0.3.4,<1.0.0" }, @@ -2484,7 +2485,7 @@ requires-dist = [ { name = "aiohttp", specifier = ">=3.9.1,<4.0.0" }, { name = "fireworks-ai", specifier = ">=0.13.0,<1.0.0" }, { name = "langchain-core", editable = "libs/core" }, - { name = "openai", specifier = ">=1.10.0,<2.0.0" }, + { name = "openai", specifier = ">=1.0.0,<1.108.0" }, { name = 
"requests", specifier = ">=2.0.0,<3.0.0" }, ] @@ -2621,6 +2622,7 @@ dev = [ docs = [ { name = "autodoc-pydantic" }, { name = "beautifulsoup4" }, + { name = "docutils" }, { name = "myst-nb" }, { name = "myst-parser" }, { name = "pydata-sphinx-theme" }, @@ -2631,6 +2633,8 @@ docs = [ { name = "sphinx-design" }, { name = "sphinxcontrib-googleanalytics" }, { name = "toml" }, + { name = "types-docutils" }, + { name = "types-toml" }, ] lint = [ { name = "ruff" }, @@ -2680,6 +2684,7 @@ dev = [ docs = [ { name = "autodoc-pydantic", specifier = ">=2.0.0,<3.0.0" }, { name = "beautifulsoup4" }, + { name = "docutils", specifier = ">=0.21.2" }, { name = "myst-nb", specifier = ">=1.1.1" }, { name = "myst-parser", specifier = ">=3.0.0,<4.0.0" }, { name = "pydata-sphinx-theme", specifier = ">=0.15.0" }, @@ -2690,6 +2695,8 @@ docs = [ { name = "sphinx-design" }, { name = "sphinxcontrib-googleanalytics" }, { name = "toml", specifier = ">=0.10.2,<0.11.0" }, + { name = "types-docutils", specifier = ">=0.22.1.20250923" }, + { name = "types-toml", specifier = ">=0.10.8.20240310" }, ] lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13.0" }] test = [ @@ -2737,7 +2744,7 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "langchain-core", editable = "libs/core" }, - { name = "openai", specifier = ">=1.104.2,<2.0.0" }, + { name = "openai", specifier = ">=1.104.2,<1.108.0" }, { name = "tiktoken", specifier = ">=0.7.0,<1.0.0" }, ] @@ -2822,6 +2829,8 @@ test = [ test-integration = [ { name = "en-core-web-sm", url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl" }, { name = "nltk", specifier = ">=3.9.1,<4.0.0" }, + { name = "scipy", marker = "python_full_version == '3.12.*'", specifier = ">=1.7.0,<2.0.0" }, + { name = "scipy", marker = "python_full_version >= '3.13'", specifier = ">=1.14.1,<2.0.0" }, { name = "sentence-transformers", specifier = ">=3.0.1,<4.0.0" }, { name = "spacy", specifier = 
">=3.8.7,<4.0.0" }, { name = "thinc", specifier = ">=8.3.6,<9.0.0" }, @@ -2862,7 +2871,7 @@ name = "langdetect" version = "1.0.9" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "six" }, + { name = "six", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2569554f7c70f4a3c27712f40e3284d483e88094cc0e/langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0", size = 981474, upload-time = "2021-05-07T07:54:13.562Z" } @@ -3630,10 +3639,10 @@ name = "nltk" version = "3.9.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click" }, - { name = "joblib" }, - { name = "regex" }, - { name = "tqdm" }, + { name = "click", marker = "python_full_version < '3.13'" }, + { name = "joblib", marker = "python_full_version < '3.13'" }, + { name = "regex", marker = "python_full_version < '3.13'" }, + { name = "tqdm", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" } wheels = [ @@ -4944,9 +4953,9 @@ name = "python-oxmsg" version = "0.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click" }, - { name = "olefile" }, - { name = "typing-extensions" }, + { name = "click", marker = "python_full_version < '3.13'" }, + { name = "olefile", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a2/4e/869f34faedbc968796d2c7e9837dede079c9cb9750917356b1f1eda926e9/python_oxmsg-0.0.2.tar.gz", hash = "sha256:a6aff4deb1b5975d44d49dab1d9384089ffeec819e19c6940bc7ffbc84775fad", size = 34713, 
upload-time = "2025-02-03T17:13:47.415Z" } wheels = [ @@ -6127,6 +6136,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/cc/0a838ba5ca64dc832aa43f727bd586309846b0ffb2ce52422543e6075e8a/typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847", size = 44908, upload-time = "2024-12-04T17:44:57.291Z" }, ] +[[package]] +name = "types-docutils" +version = "0.22.1.20250923" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/80/76/3b2b841d2ea3b59685c59cfcfeddd439146287319c1566099796507ee866/types_docutils-0.22.1.20250923.tar.gz", hash = "sha256:f7754c7eeab44144095e287bb3afcb96d6936dc3fa332cff4e5ef5533baa198f", size = 56652, upload-time = "2025-09-23T02:51:20.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/22/c762f76ace7de3c8d469f9f25a8b8870bff47fb31ecc15453ac96426f36a/types_docutils-0.22.1.20250923-py3-none-any.whl", hash = "sha256:a01a8036b373dd578ec1a170db04f4159be1282ca92f0a085e50f4afbeb8d0bc", size = 91872, upload-time = "2025-09-23T02:51:19.305Z" }, +] + [[package]] name = "types-python-dateutil" version = "2.9.0.20241206" @@ -6136,6 +6154,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0f/b3/ca41df24db5eb99b00d97f89d7674a90cb6b3134c52fb8121b6d8d30f15c/types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53", size = 14384, upload-time = "2024-12-06T02:56:39.412Z" }, ] +[[package]] +name = "types-toml" +version = "0.10.8.20240310" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/47/3e4c75042792bff8e90d7991aa5c51812cc668828cc6cce711e97f63a607/types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331", size = 4392, upload-time = "2024-03-10T02:18:37.518Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/da/a2/d32ab58c0b216912638b140ab2170ee4b8644067c293b170e19fba340ccc/types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d", size = 4777, upload-time = "2024-03-10T02:18:36.568Z" }, +] + [[package]] name = "typing-extensions" version = "4.12.2"