Mirror of https://github.com/hwchase17/langchain.git (synced 2026-01-24 22:05:39 +00:00)
docs: v1 docs updates (#33173)
Co-authored-by: Mohammad Mohtashim <45242107+keenborder786@users.noreply.github.com>
Co-authored-by: Caspar Broekhuizen <caspar@langchain.dev>
Co-authored-by: ccurme <chester.curme@gmail.com>
Co-authored-by: Christophe Bornet <cbornet@hotmail.com>
Co-authored-by: Eugene Yurtsev <eyurtsev@gmail.com>
Co-authored-by: Sadra Barikbin <sadraqazvin1@yahoo.com>
Co-authored-by: Vadym Barda <vadim.barda@gmail.com>

.github/workflows/_test_doc_imports.yml (vendored): 63 changed lines
@@ -1,63 +0,0 @@
-# Validates that all import statements in `.ipynb` notebooks are correct and functional.
-#
-# Called as part of check_diffs.yml.
-#
-# Installs test dependencies and LangChain packages in editable mode and
-# runs check_imports.py.
-
-name: '📑 Documentation Import Testing'
-
-on:
-  workflow_call:
-    inputs:
-      python-version:
-        required: true
-        type: string
-        description: "Python version to use"
-
-permissions:
-  contents: read
-
-env:
-  UV_FROZEN: "true"
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    timeout-minutes: 20
-    name: '🔍 Check Doc Imports (Python ${{ inputs.python-version }})'
-    steps:
-      - name: '📋 Checkout Code'
-        uses: actions/checkout@v5
-
-      - name: '🐍 Set up Python ${{ inputs.python-version }} + UV'
-        uses: "./.github/actions/uv_setup"
-        with:
-          python-version: ${{ inputs.python-version }}
-          cache-suffix: test-doc-imports-${{ inputs.working-directory }}
-          working-directory: ${{ inputs.working-directory }}
-
-      - name: '📦 Install Test Dependencies'
-        shell: bash
-        run: uv sync --group test
-
-      - name: '📦 Install LangChain in Editable Mode'
-        run: |
-          VIRTUAL_ENV=.venv uv pip install langchain-experimental langchain-community -e libs/core libs/langchain
-
-      - name: '🔍 Validate Documentation Import Statements'
-        shell: bash
-        run: |
-          uv run python docs/scripts/check_imports.py
-
-      - name: '🧹 Verify Clean Working Directory'
-        shell: bash
-        run: |
-          set -eu
-
-          STATUS="$(git status)"
-          echo "$STATUS"
-
-          # grep will exit non-zero if the target message isn't found,
-          # and `set -e` above will cause the step to fail.
-          echo "$STATUS" | grep 'nothing to commit, working tree clean'
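
A note on the clean-working-directory step above: because `set -e` is active, the step's exit status comes from grep, so the job fails exactly when `git status` does not report a clean tree. A minimal standalone sketch of the same gate, assuming any git checkout (`git status --porcelain`, checked for emptiness, would be an equivalent, more script-friendly probe):

    #!/usr/bin/env bash
    set -eu  # exit on the first non-zero status, including grep's

    STATUS="$(git status)"
    echo "$STATUS"

    # grep exits 1 when the pattern is absent, so a dirty tree fails here.
    echo "$STATUS" | grep -q 'nothing to commit, working tree clean'
    echo "working tree is clean"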

.github/workflows/api_doc_build.yml (vendored): 152 changed lines
@@ -1,152 +0,0 @@
-# Build the API reference documentation.
-#
-# Runs daily. Can also be triggered manually for immediate updates.
-#
-# Built HTML pushed to langchain-ai/langchain-api-docs-html.
-#
-# Looks for langchain-ai org repos in packages.yml and checks them out.
-# Calls prep_api_docs_build.py.
-
-name: '📚 API Docs'
-run-name: 'Build & Deploy API Reference'
-
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: '0 13 * * *' # Runs daily at 1PM UTC (9AM EDT/6AM PDT)
-env:
-  PYTHON_VERSION: "3.11"
-
-jobs:
-  # Only runs on main repository to prevent unnecessary builds on forks
-  build:
-    if: github.repository == 'langchain-ai/langchain' || github.event_name != 'schedule'
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-    steps:
-      - uses: actions/checkout@v5
-        with:
-          path: langchain
-      - uses: actions/checkout@v5
-        with:
-          repository: langchain-ai/langchain-api-docs-html
-          path: langchain-api-docs-html
-          token: ${{ secrets.TOKEN_GITHUB_API_DOCS_HTML }}
-
-      - name: '📋 Extract Repository List with yq'
-        id: get-unsorted-repos
-        uses: mikefarah/yq@master
-        with:
-          cmd: |
-            # Extract repos from packages.yml that are in the langchain-ai org
-            # (excluding 'langchain' itself)
-            yq '
-              .packages[]
-              | select(
-                  (
-                    (.repo | test("^langchain-ai/"))
-                    and
-                    (.repo != "langchain-ai/langchain")
-                  )
-                  or
-                  (.include_in_api_ref // false)
-                )
-              | .repo
-            ' langchain/libs/packages.yml
-
-      - name: '📋 Parse YAML & Checkout Repositories'
-        env:
-          REPOS_UNSORTED: ${{ steps.get-unsorted-repos.outputs.result }}
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          # Get unique repositories
-          REPOS=$(echo "$REPOS_UNSORTED" | sort -u)
-          # Checkout each unique repository
-          for repo in $REPOS; do
-            # Validate repository format (allow any org with proper format)
-            if [[ ! "$repo" =~ ^[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+$ ]]; then
-              echo "Error: Invalid repository format: $repo"
-              exit 1
-            fi
-
-            REPO_NAME=$(echo $repo | cut -d'/' -f2)
-
-            # Additional validation for repo name
-            if [[ ! "$REPO_NAME" =~ ^[a-zA-Z0-9_.-]+$ ]]; then
-              echo "Error: Invalid repository name: $REPO_NAME"
-              exit 1
-            fi
-            echo "Checking out $repo to $REPO_NAME"
-            git clone --depth 1 https://github.com/$repo.git $REPO_NAME
-          done
-
-      - name: '🐍 Setup Python ${{ env.PYTHON_VERSION }}'
-        uses: actions/setup-python@v6
-        id: setup-python
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-
-      - name: '📦 Install Initial Python Dependencies using uv'
-        working-directory: langchain
-        run: |
-          python -m pip install -U uv
-          python -m uv pip install --upgrade --no-cache-dir pip setuptools pyyaml
-
-      - name: '📦 Organize Library Directories'
-        # Places cloned partner packages into libs/partners structure
-        run: python langchain/.github/scripts/prep_api_docs_build.py
-
-      - name: '🧹 Clear Prior Build'
-        run:
-          # Remove artifacts from prior docs build
-          rm -rf langchain-api-docs-html/api_reference_build/html
-
-      - name: '📦 Install Documentation Dependencies using uv'
-        working-directory: langchain
-        run: |
-          # Install all partner packages in editable mode with overrides
-          python -m uv pip install $(ls ./libs/partners | xargs -I {} echo "./libs/partners/{}") --overrides ./docs/vercel_overrides.txt
-
-          # Install core langchain and other main packages
-          python -m uv pip install libs/core libs/langchain libs/text-splitters libs/community libs/experimental libs/standard-tests
-
-          # Install Sphinx and related packages for building docs
-          python -m uv pip install -r docs/api_reference/requirements.txt
-
-      - name: '🔧 Configure Git Settings'
-        working-directory: langchain
-        run: |
-          git config --local user.email "actions@github.com"
-          git config --local user.name "Github Actions"
-
-      - name: '📚 Build API Documentation'
-        working-directory: langchain
-        run: |
-          # Generate the API reference RST files
-          python docs/api_reference/create_api_rst.py
-
-          # Build the HTML documentation using Sphinx
-          # -T: show full traceback on exception
-          # -E: don't use cached environment (force rebuild, ignore cached doctrees)
-          # -b html: build HTML docs (vs PDF, etc.)
-          # -d: path for the cached environment (parsed document trees / doctrees)
-          #   - Separate from output dir for faster incremental builds
-          # -c: path to conf.py
-          # -j auto: parallel build using all available CPU cores
-          python -m sphinx -T -E -b html -d ../langchain-api-docs-html/_build/doctrees -c docs/api_reference docs/api_reference ../langchain-api-docs-html/api_reference_build/html -j auto
-
-          # Post-process the generated HTML
-          python docs/api_reference/scripts/custom_formatter.py ../langchain-api-docs-html/api_reference_build/html
-
-          # Default index page is blank so we copy in the actual home page.
-          cp ../langchain-api-docs-html/api_reference_build/html/{reference,index}.html
-
-          # Removes Sphinx's intermediate build artifacts after the build is complete.
-          rm -rf ../langchain-api-docs-html/_build/
-
-      # Commit and push changes to langchain-api-docs-html repo
-      - uses: EndBug/add-and-commit@v9
-        with:
-          cwd: langchain-api-docs-html
-          message: 'Update API docs build'
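
The yq step above is the heart of repo discovery: it selects every langchain-ai package except the monorepo itself, plus any package that opts in via `include_in_api_ref`. A self-contained reproduction of the filter; the packages.yml entries below are invented for illustration, and only the `.repo` and `.include_in_api_ref` fields are assumed from the filter itself:

    # Invented packages.yml entries, just to exercise the filter:
    cat > /tmp/packages.yml <<'EOF'
    packages:
      - repo: langchain-ai/langchain
      - repo: langchain-ai/langchain-google
      - repo: example-org/langchain-example
        include_in_api_ref: true
      - repo: example-org/unrelated
    EOF

    # Same filter as the workflow: langchain-ai repos (minus the monorepo)
    # plus anything that opts in via include_in_api_ref.
    yq '
      .packages[]
      | select(
          ((.repo | test("^langchain-ai/")) and (.repo != "langchain-ai/langchain"))
          or (.include_in_api_ref // false)
        )
      | .repo
    ' /tmp/packages.yml
    # prints: langchain-ai/langchain-google
    #         example-org/langchain-example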

.github/workflows/check-broken-links.yml (vendored): 30 changed lines
@@ -1,30 +0,0 @@
-# Runs broken link checker in /docs on a daily schedule.
-
-name: '🔗 Check Broken Links'
-
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: '0 13 * * *' # Runs daily at 1PM UTC (9AM EDT/6AM PDT)
-
-permissions:
-  contents: read
-
-jobs:
-  check-links:
-    if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule'
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v5
-      - name: '🟢 Setup Node.js 18.x'
-        uses: actions/setup-node@v5
-        with:
-          node-version: 18.x
-          cache: "yarn"
-          cache-dependency-path: ./docs/yarn.lock
-      - name: '📦 Install Node Dependencies'
-        run: yarn install --immutable --mode=skip-build
-        working-directory: ./docs
-      - name: '🔍 Scan Documentation for Broken Links'
-        run: yarn check-broken-links
-        working-directory: ./docs

.github/workflows/check_diffs.yml (vendored): 73 changed lines
@@ -6,14 +6,13 @@
 # - Linting (_lint.yml)
 # - Unit Tests (_test.yml)
 # - Pydantic compatibility tests (_test_pydantic.yml)
-# - Documentation import tests (_test_doc_imports.yml)
 # - Integration test compilation checks (_compile_integration_test.yml)
 # - Extended test suites that require additional dependencies
 # - Codspeed benchmarks (if not labeled 'codspeed-ignore')
 #
 # Reports status to GitHub checks and PR status.
 
-name: '🔧 CI'
+name: "🔧 CI"
 
 on:
   push:
@@ -43,20 +42,20 @@ jobs:
   # This job analyzes which files changed and creates a dynamic test matrix
   # to only run tests/lints for the affected packages, improving CI efficiency
   build:
-    name: 'Detect Changes & Set Matrix'
+    name: "Detect Changes & Set Matrix"
     runs-on: ubuntu-latest
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'ci-ignore') }}
     steps:
-      - name: '📋 Checkout Code'
+      - name: "📋 Checkout Code"
         uses: actions/checkout@v5
-      - name: '🐍 Setup Python 3.11'
+      - name: "🐍 Setup Python 3.11"
        uses: actions/setup-python@v6
        with:
-          python-version: '3.11'
-      - name: '📂 Get Changed Files'
+          python-version: "3.11"
+      - name: "📂 Get Changed Files"
        id: files
        uses: Ana06/get-changed-files@v2.3.0
-      - name: '🔍 Analyze Changed Files & Generate Build Matrix'
+      - name: "🔍 Analyze Changed Files & Generate Build Matrix"
        id: set-matrix
        run: |
          python -m pip install packaging requests
@@ -67,12 +66,11 @@
       extended-tests: ${{ steps.set-matrix.outputs.extended-tests }}
       compile-integration-tests: ${{ steps.set-matrix.outputs.compile-integration-tests }}
       dependencies: ${{ steps.set-matrix.outputs.dependencies }}
-      test-doc-imports: ${{ steps.set-matrix.outputs.test-doc-imports }}
       test-pydantic: ${{ steps.set-matrix.outputs.test-pydantic }}
       codspeed: ${{ steps.set-matrix.outputs.codspeed }}
   # Run linting only on packages that have changed files
   lint:
-    needs: [ build ]
+    needs: [build]
     if: ${{ needs.build.outputs.lint != '[]' }}
     strategy:
       matrix:
@@ -86,7 +84,7 @@
 
   # Run unit tests only on packages that have changed files
   test:
-    needs: [ build ]
+    needs: [build]
     if: ${{ needs.build.outputs.test != '[]' }}
     strategy:
       matrix:
@@ -100,7 +98,7 @@
 
   # Test compatibility with different Pydantic versions for affected packages
   test-pydantic:
-    needs: [ build ]
+    needs: [build]
     if: ${{ needs.build.outputs.test-pydantic != '[]' }}
     strategy:
       matrix:
@@ -112,22 +110,10 @@
       pydantic-version: ${{ matrix.job-configs.pydantic-version }}
     secrets: inherit
 
-  test-doc-imports:
-    needs: [ build ]
-    if: ${{ needs.build.outputs.test-doc-imports != '[]' }}
-    strategy:
-      matrix:
-        job-configs: ${{ fromJson(needs.build.outputs.test-doc-imports) }}
-      fail-fast: false
-    uses: ./.github/workflows/_test_doc_imports.yml
-    with:
-      python-version: ${{ matrix.job-configs.python-version }}
-    secrets: inherit
-
   # Verify integration tests compile without actually running them (faster feedback)
   compile-integration-tests:
-    name: 'Compile Integration Tests'
-    needs: [ build ]
+    name: "Compile Integration Tests"
+    needs: [build]
     if: ${{ needs.build.outputs.compile-integration-tests != '[]' }}
     strategy:
       matrix:
@@ -141,8 +127,8 @@
 
   # Run extended test suites that require additional dependencies
   extended-tests:
-    name: 'Extended Tests'
-    needs: [ build ]
+    name: "Extended Tests"
+    needs: [build]
     if: ${{ needs.build.outputs.extended-tests != '[]' }}
     strategy:
       matrix:
@@ -157,14 +143,14 @@
     steps:
       - uses: actions/checkout@v5
 
-      - name: '🐍 Set up Python ${{ matrix.job-configs.python-version }} + UV'
+      - name: "🐍 Set up Python ${{ matrix.job-configs.python-version }} + UV"
        uses: "./.github/actions/uv_setup"
        with:
          python-version: ${{ matrix.job-configs.python-version }}
          cache-suffix: extended-tests-${{ matrix.job-configs.working-directory }}
          working-directory: ${{ matrix.job-configs.working-directory }}
 
-      - name: '📦 Install Dependencies & Run Extended Tests'
+      - name: "📦 Install Dependencies & Run Extended Tests"
        shell: bash
        run: |
          echo "Running extended tests, installing dependencies with uv..."
@@ -173,7 +159,7 @@
           VIRTUAL_ENV=.venv uv pip install -r extended_testing_deps.txt
           VIRTUAL_ENV=.venv make extended_tests
 
-      - name: '🧹 Verify Clean Working Directory'
+      - name: "🧹 Verify Clean Working Directory"
        shell: bash
        run: |
          set -eu
@@ -187,8 +173,8 @@
 
   # Run codspeed benchmarks only on packages that have changed files
   codspeed:
-    name: '⚡ CodSpeed Benchmarks'
-    needs: [ build ]
+    name: "⚡ CodSpeed Benchmarks"
+    needs: [build]
     if: ${{ needs.build.outputs.codspeed != '[]' && !contains(github.event.pull_request.labels.*.name, 'codspeed-ignore') }}
     runs-on: ubuntu-latest
     strategy:
@@ -199,7 +185,7 @@
       - uses: actions/checkout@v5
 
       # We have to use 3.12 as 3.13 is not yet supported
-      - name: '📦 Install UV Package Manager'
+      - name: "📦 Install UV Package Manager"
        uses: astral-sh/setup-uv@v6
        with:
          python-version: "3.12"
@@ -208,11 +194,11 @@
        with:
          python-version: "3.12"
 
-      - name: '📦 Install Test Dependencies'
+      - name: "📦 Install Test Dependencies"
        run: uv sync --group test
        working-directory: ${{ matrix.job-configs.working-directory }}
 
-      - name: '⚡ Run Benchmarks: ${{ matrix.job-configs.working-directory }}'
+      - name: "⚡ Run Benchmarks: ${{ matrix.job-configs.working-directory }}"
        uses: CodSpeedHQ/action@v4
        env:
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
@@ -249,8 +235,17 @@
 
   # Final status check - ensures all required jobs passed before allowing merge
   ci_success:
-    name: '✅ CI Success'
-    needs: [build, lint, test, compile-integration-tests, extended-tests, test-doc-imports, test-pydantic, codspeed]
+    name: "✅ CI Success"
+    needs:
+      [
+        build,
+        lint,
+        test,
+        compile-integration-tests,
+        extended-tests,
+        test-pydantic,
+        codspeed,
+      ]
     if: |
       always()
     runs-on: ubuntu-latest
@@ -259,7 +254,7 @@
       RESULTS_JSON: ${{ toJSON(needs.*.result) }}
       EXIT_CODE: ${{!contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') && '0' || '1'}}
     steps:
-      - name: '🎉 All Checks Passed'
+      - name: "🎉 All Checks Passed"
        run: |
          echo $JOBS_JSON
          echo $RESULTS_JSON
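
The build job's outputs above feed every downstream job through `fromJson(...)`. For context, a minimal sketch of the producing side; the real logic lives in a Python script that is not shown in this diff, so the directory names and loop below are illustrative only (the `test` output key and `fromJson` consumption pattern are taken from the workflow):

    # Stand-in for the computed set of affected packages:
    DIRS="libs/core libs/langchain"

    JOBS=""
    for d in $DIRS; do
      JOBS="${JOBS:+$JOBS,}{\"working-directory\":\"$d\",\"python-version\":\"3.11\"}"
    done

    # Downstream jobs then expand it with:
    #   job-configs: ${{ fromJson(needs.build.outputs.test) }}
    echo "test=[$JOBS]" >> "$GITHUB_OUTPUT"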

.github/workflows/check_new_docs.yml (vendored): 41 changed lines
@@ -1,41 +0,0 @@
-# For integrations, we run check_templates.py to ensure that new docs use the correct
-# templates based on their type. See the script for more details.
-
-name: '📑 Integration Docs Lint'
-
-on:
-  push:
-    branches: [master]
-  pull_request:
-
-# If another push to the same PR or branch happens while this workflow is still running,
-# cancel the earlier run in favor of the next run.
-#
-# There's no point in testing an outdated version of the code. GitHub only allows
-# a limited number of job runners to be active at the same time, so it's better to cancel
-# pointless jobs early so that more useful jobs can run sooner.
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: read
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v5
-      - uses: actions/setup-python@v6
-        with:
-          python-version: '3.10'
-      - id: files
-        uses: Ana06/get-changed-files@v2.3.0
-        with:
-          filter: |
-            *.ipynb
-            *.md
-            *.mdx
-      - name: '🔍 Check New Documentation Templates'
-        run: |
-          python docs/scripts/check_templates.py ${{ steps.files.outputs.added }}

.github/workflows/people.yml (vendored): 30 changed lines
@@ -1,30 +0,0 @@
-# Updates the LangChain People data by fetching the latest info from the LangChain Git.
-# TODO: broken/not used
-
-name: '👥 LangChain People'
-run-name: 'Update People Data'
-on:
-  schedule:
-    - cron: "0 14 1 * *" # Runs at 14:00 UTC on the 1st of every month (10AM EDT/7AM PDT)
-  push:
-    branches: [jacob/people]
-  workflow_dispatch:
-
-jobs:
-  langchain-people:
-    if: github.repository_owner == 'langchain-ai' || github.event_name != 'schedule'
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-    steps:
-      - name: '📋 Dump GitHub Context'
-        env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
-        run: echo "$GITHUB_CONTEXT"
-      - uses: actions/checkout@v5
-      # Ref: https://github.com/actions/runner/issues/2033
-      - name: '🔧 Fix Git Safe Directory in Container'
-        run: mkdir -p /home/runner/work/_temp/_github_home && printf "[safe]\n\tdirectory = /github/workspace" > /home/runner/work/_temp/_github_home/.gitconfig
-      - uses: ./.github/actions/people
-        with:
-          token: ${{ secrets.LANGCHAIN_PEOPLE_GITHUB_TOKEN }}
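
The 'Fix Git Safe Directory in Container' step writes a gitconfig into the runner's mounted home directory because the people action runs inside a container (see the linked runner issue). From inside that container, it amounts to this one-liner:

    # Mark the mounted workspace as safe for git, so git commands in the
    # container don't abort with a "dubious ownership" error.
    git config --global --add safe.directory /github/workspace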

.github/workflows/pr_lint.yml (vendored): 2 changed lines
@@ -15,7 +15,7 @@
 # Allowed Types:
 # * feat — a new feature (MINOR)
 # * fix — a bug fix (PATCH)
-# * docs — documentation only changes (either in /docs or code comments)
+# * docs — documentation only changes
 # * style — formatting, linting, etc.; no code change or typing refactors
 # * refactor — code change that neither fixes a bug nor adds a feature
 # * perf — code change that improves performance
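
For context, the allowed-types comment above documents a Conventional Commits check on PR titles. A rough illustration of such a check; the regex and shell logic here are illustrative only, not the workflow's actual tooling:

    TITLE="docs: v1 docs updates (#33173)"
    if echo "$TITLE" | grep -qE '^(feat|fix|docs|style|refactor|perf)(\([a-z0-9_-]+\))?!?: .+'; then
      echo "PR title follows Conventional Commits"
    else
      echo "PR title must look like 'type(scope): subject'" >&2
      exit 1
    fi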

.github/workflows/run_notebooks.yml (vendored): 79 changed lines
@@ -1,79 +0,0 @@
-# Integration tests for documentation notebooks.
-
-name: '📓 Validate Documentation Notebooks'
-run-name: 'Test notebooks in ${{ inputs.working-directory }}'
-on:
-  workflow_dispatch:
-    inputs:
-      python_version:
-        description: 'Python version'
-        required: false
-        default: '3.11'
-      working-directory:
-        description: 'Working directory or subset (e.g., docs/docs/tutorials/llm_chain.ipynb or docs/docs/how_to)'
-        required: false
-        default: 'all'
-  schedule:
-    - cron: '0 13 * * *'
-
-permissions:
-  contents: read
-
-env:
-  UV_FROZEN: "true"
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    if: github.repository == 'langchain-ai/langchain' || github.event_name != 'schedule'
-    name: '📑 Test Documentation Notebooks'
-    steps:
-      - uses: actions/checkout@v5
-
-      - name: '🐍 Set up Python + UV'
-        uses: "./.github/actions/uv_setup"
-        with:
-          python-version: ${{ github.event.inputs.python_version || '3.11' }}
-          cache-suffix: run-notebooks-${{ github.event.inputs.working-directory || 'all' }}
-          working-directory: ${{ github.event.inputs.working-directory || '**' }}
-
-      - name: '🔐 Authenticate to Google Cloud'
-        id: 'auth'
-        uses: google-github-actions/auth@v3
-        with:
-          credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}'
-
-      - name: '🔐 Configure AWS Credentials'
-        uses: aws-actions/configure-aws-credentials@v5
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_REGION }}
-
-      - name: '📦 Install Dependencies'
-        run: |
-          uv sync --group dev --group test
-
-      - name: '📦 Pre-download Test Files'
-        run: |
-          uv run python docs/scripts/cache_data.py
-          curl -s https://raw.githubusercontent.com/lerocha/chinook-database/master/ChinookDatabase/DataSources/Chinook_Sqlite.sql | sqlite3 docs/docs/how_to/Chinook.db
-          cp docs/docs/how_to/Chinook.db docs/docs/tutorials/Chinook.db
-
-      - name: '🔧 Prepare Notebooks for CI'
-        run: |
-          uv run python docs/scripts/prepare_notebooks_for_ci.py --comment-install-cells --working-directory ${{ github.event.inputs.working-directory || 'all' }}
-
-      - name: '🚀 Execute Notebooks'
-        env:
-          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
-          FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
-          GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
-          GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
-          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
-          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-          TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }}
-          TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
-          WORKING_DIRECTORY: ${{ github.event.inputs.working-directory || 'all' }}
-        run: |
-          ./docs/scripts/execute_notebooks.sh $WORKING_DIRECTORY
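
The pre-download step above builds the Chinook SQLite fixture by piping the public SQL dump straight into sqlite3. To reproduce locally, using the same command as the workflow (the sanity-check query is an addition, not part of the workflow):

    curl -s https://raw.githubusercontent.com/lerocha/chinook-database/master/ChinookDatabase/DataSources/Chinook_Sqlite.sql \
      | sqlite3 Chinook.db
    # Sanity check: the Chinook sample schema ships a populated Artist table.
    sqlite3 Chinook.db 'SELECT COUNT(*) FROM Artist;'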

.github/workflows/v1_changes.md (vendored): 1 changed line
@@ -5,5 +5,4 @@ in the new docs repo:
 - people.yml: Need to fix and somehow display on the new docs site
   - Subsequently, `.github/actions/people/`
-- _test_doc_imports.yml
 - check_new_docs.yml
 - check-broken-links.yml